diff --git a/src/org/hibernate/AssertionFailure.java b/src/org/hibernate/AssertionFailure.java new file mode 100644 index 0000000000..171519f0cf --- /dev/null +++ b/src/org/hibernate/AssertionFailure.java @@ -0,0 +1,36 @@ +//$Id$ +package org.hibernate; + +import org.hibernate.exception.NestableRuntimeException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Indicates failure of an assertion: a possible bug in Hibernate. + * + * @author Gavin King + */ + +public class AssertionFailure extends NestableRuntimeException { + + private static final Log log = LogFactory.getLog(AssertionFailure.class); + + private static final String MESSAGE = "an assertion failure occurred (this may indicate a bug in Hibernate, but is more likely due to unsafe use of the session)"; + + public AssertionFailure(String s) { + super(s); + log.error(MESSAGE, this); + } + + public AssertionFailure(String s, Throwable t) { + super(s, t); + log.error(MESSAGE, t); + } + +} + + + + + + diff --git a/src/org/hibernate/CacheMode.java b/src/org/hibernate/CacheMode.java new file mode 100755 index 0000000000..b713a18407 --- /dev/null +++ b/src/org/hibernate/CacheMode.java @@ -0,0 +1,78 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; + +/** + * Controls how the session interacts with the second-level + * cache and query cache. + * + * @see Session#setCacheMode(CacheMode) + * @author Gavin King + */ +public final class CacheMode implements Serializable { + private final String name; + private final boolean isPutEnabled; + private final boolean isGetEnabled; + private static final Map INSTANCES = new HashMap(); + + private CacheMode(String name, boolean isPutEnabled, boolean isGetEnabled) { + this.name=name; + this.isPutEnabled = isPutEnabled; + this.isGetEnabled = isGetEnabled; + } + public String toString() { + return name; + } + public boolean isPutEnabled() { + return isPutEnabled; + } + public boolean isGetEnabled() { + return isGetEnabled; + } + /** + * The session may read items from the cache, and add items to the cache + */ + public static final CacheMode NORMAL = new CacheMode("NORMAL", true, true); + /** + * The session will never interact with the cache, except to invalidate + * cache items when updates occur + */ + public static final CacheMode IGNORE = new CacheMode("IGNORE", false, false); + /** + * The session may read items from the cache, but will not add items, + * except to invalidate items when updates occur + */ + public static final CacheMode GET = new CacheMode("GET", false, true); + /** + * The session will never read items from the cache, but will add items + * to the cache as it reads them from the database. + */ + public static final CacheMode PUT = new CacheMode("PUT", true, false); + + /** + * The session will never read items from the cache, but will add items + * to the cache as it reads them from the database.
In this mode, the + * effect of hibernate.cache.use_minimal_puts is bypassed, in + * order to force a cache refresh + */ + public static final CacheMode REFRESH = new CacheMode("REFRESH", true, false); + + static { + INSTANCES.put( NORMAL.name, NORMAL ); + INSTANCES.put( IGNORE.name, IGNORE ); + INSTANCES.put( GET.name, GET ); + INSTANCES.put( PUT.name, PUT ); + INSTANCES.put( REFRESH.name, REFRESH ); + } + + private Object readResolve() { + return INSTANCES.get( name ); + } + + public static CacheMode parse(String name) { + return ( CacheMode ) INSTANCES.get( name ); + } +} diff --git a/src/org/hibernate/CallbackException.java b/src/org/hibernate/CallbackException.java new file mode 100644 index 0000000000..f63d661f45 --- /dev/null +++ b/src/org/hibernate/CallbackException.java @@ -0,0 +1,34 @@ +//$Id$ +package org.hibernate; + + +/** + * Should be thrown by persistent objects from Lifecycle + * or Interceptor callbacks. + * + * @see Lifecycle + * @see Interceptor + * @author Gavin King + */ + +public class CallbackException extends HibernateException { + + public CallbackException(Exception root) { + super("An exception occurred in a callback", root); + } + + public CallbackException(String message) { + super(message); + } + + public CallbackException(String message, Exception e) { + super(message, e); + } + +} + + + + + + diff --git a/src/org/hibernate/ConnectionReleaseMode.java b/src/org/hibernate/ConnectionReleaseMode.java new file mode 100644 index 0000000000..d36f4b3a88 --- /dev/null +++ b/src/org/hibernate/ConnectionReleaseMode.java @@ -0,0 +1,77 @@ +// $Id$ +package org.hibernate; + +import java.io.Serializable; + +/** + * Defines the various policies by which Hibernate might release its underlying + * JDBC connection. + * + * @author Steve Ebersole + */ +public class ConnectionReleaseMode implements Serializable { + + /** + * Indicates that JDBC connection should be aggressively released after each + * SQL statement is executed. In this mode, the application must + * explicitly close all iterators and scrollable results. This mode may + * only be used with a JTA datasource. + */ + public static final ConnectionReleaseMode AFTER_STATEMENT = new ConnectionReleaseMode( "after_statement" ); + + /** + * Indicates that JDBC connections should be released after each transaction + * ends (works with both JTA-registered synch and HibernateTransaction API). + * This mode may not be used with an application server JTA datasource. + *
+ * This is the default mode starting in 3.1; was previously {@link #ON_CLOSE}. + */ + public static final ConnectionReleaseMode AFTER_TRANSACTION = new ConnectionReleaseMode( "after_transaction" ); + + /** + * Indicates that connections should only be released when the Session is explicitly closed + * or disconnected; this is the legacy (Hibernate2 and pre-3.1) behavior. + */ + public static final ConnectionReleaseMode ON_CLOSE = new ConnectionReleaseMode( "on_close" ); + + + private String name; + + private ConnectionReleaseMode(String name) { + this.name = name; + } + + /** + * Override of Object.toString(). Returns the release mode name. + * + * @return The release mode name. + */ + public String toString() { + return name; + } + + /** + * Determine the correct ConnectionReleaseMode instance based on the given + * name. + * + * @param modeName The release mode name. + * @return The appropriate ConnectionReleaseMode instance + * @throws HibernateException Indicates the modeName param did not match any known modes. + */ + public static ConnectionReleaseMode parse(String modeName) throws HibernateException { + if ( AFTER_STATEMENT.name.equals( modeName ) ) { + return AFTER_STATEMENT; + } + else if ( AFTER_TRANSACTION.name.equals( modeName ) ) { + return AFTER_TRANSACTION; + } + else if ( ON_CLOSE.name.equals( modeName ) ) { + return ON_CLOSE; + } + throw new HibernateException( "could not determine appropriate connection release mode [" + modeName + "]" ); + } + + private Object readResolve() { + return parse( name ); + } +} diff --git a/src/org/hibernate/Criteria.java b/src/org/hibernate/Criteria.java new file mode 100644 index 0000000000..d0d9340664 --- /dev/null +++ b/src/org/hibernate/Criteria.java @@ -0,0 +1,338 @@ +//$Id$ +package org.hibernate; + +import java.util.List; + +import org.hibernate.criterion.CriteriaSpecification; +import org.hibernate.criterion.Criterion; +import org.hibernate.criterion.Order; +import org.hibernate.criterion.Projection; +import org.hibernate.transform.ResultTransformer; + +/** + * Criteria is a simplified API for retrieving entities + * by composing Criterion objects. This is a very + * convenient approach for functionality like "search" screens + * where there is a variable number of conditions to be placed + * upon the result set.
+ *
+ * The Session is a factory for Criteria. + * Criterion instances are usually obtained via + * the factory methods on Restrictions. eg.
+ *
+ * List cats = session.createCriteria(Cat.class)
+ *     .add( Restrictions.like("name", "Iz%") )
+ *     .add( Restrictions.gt( "weight", new Float(minWeight) ) )
+ *     .addOrder( Order.asc("age") )
+ *     .list();
+ *
+ * You may navigate associations using createAlias() or + * createCriteria(). + *
+ * List cats = session.createCriteria(Cat.class)
+ *     .createCriteria("kittens")
+ *         .add( Restrictions.like("name", "Iz%") )
+ *     .list();
+ *
+ *
+ * List cats = session.createCriteria(Cat.class)
+ *     .createAlias("kittens", "kit")
+ *     .add( Restrictions.like("kit.name", "Iz%") )
+ *     .list();
+ *
+ * You may specify projection and aggregation using Projection + * instances obtained via the factory methods on Projections. + *
+ * List cats = session.createCriteria(Cat.class)
+ *     .setProjection( Projections.projectionList()
+ *         .add( Projections.rowCount() )
+ *         .add( Projections.avg("weight") )
+ *         .add( Projections.max("weight") )
+ *         .add( Projections.min("weight") )
+ *         .add( Projections.groupProperty("color") )
+ *     )
+ *     .addOrder( Order.asc("color") )
+ *     .list();
+ *
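Editor's note: the examples above stop at list(); the interface declared below also supports paging and single-row retrieval. A small sketch under the same assumptions as the examples (a mapped Cat entity and an open Session named session):

```java
// Page through results: setFirstResult() is a zero-based offset,
// setMaxResults() caps the page size.
List cats = session.createCriteria(Cat.class)
    .addOrder( Order.asc("name") )
    .setFirstResult(20)   // skip the first 20 rows
    .setMaxResults(20)    // return at most 20 rows
    .list();

// uniqueResult() returns the single match, null when nothing matches,
// and throws if the query matches more than one row.
Cat fritz = (Cat) session.createCriteria(Cat.class)
    .add( Restrictions.eq("name", "Fritz") )
    .uniqueResult();
```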
+ * + * @see Session#createCriteria(java.lang.Class) + * @see org.hibernate.criterion.Restrictions + * @see org.hibernate.criterion.Projections + * @see org.hibernate.criterion.Order + * @see org.hibernate.criterion.Criterion + * @see org.hibernate.criterion.Projection + * @see org.hibernate.criterion.DetachedCriteria a disconnected version of this API + * @author Gavin King + */ +public interface Criteria extends CriteriaSpecification { + + /** + * Get the alias of the entity encapsulated by this criteria instance. + * + * @return The alias for the encapsulated entity. + */ + public String getAlias(); + + /** + * Used to specify that the query results will be a projection (scalar in + * nature). Implicitly specifies the {@link #PROJECTION} result transformer. + *
+ * The individual components contained within the given + * {@link Projection projection} determine the overall "shape" of the + * query result. + * + * @param projection The projection representing the overall "shape" of the + * query results. + * @return this (for method chaining) + */ + public Criteria setProjection(Projection projection); + + /** + * Add a {@link Criterion restriction} to constrain the results to be + * retrieved. + * + * @param criterion The {@link Criterion criterion} object representing the + * restriction to be applied. + * @return this (for method chaining) + */ + public Criteria add(Criterion criterion); + + /** + * Add an {@link Order ordering} to the result set. + * + * @param order The {@link Order order} object representing an ordering + * to be applied to the results. + * @return this (for method chaining) + */ + public Criteria addOrder(Order order); + + /** + * Specify an association fetching strategy for an association or a + * collection of values. + * + * @param associationPath a dot-separated property path + * @param mode The fetch mode for the referenced association + * @return this (for method chaining) + */ + public Criteria setFetchMode(String associationPath, FetchMode mode) throws HibernateException; + + /** + * Set the lock mode of the current entity + * + * @param lockMode The lock mode to be applied + * @return this (for method chaining) + */ + public Criteria setLockMode(LockMode lockMode); + + /** + * Set the lock mode of the aliased entity + * + * @param alias The previously assigned alias representing the entity to + * which the given lock mode should apply. + * @param lockMode The lock mode to be applied + * @return this (for method chaining) + */ + public Criteria setLockMode(String alias, LockMode lockMode); + + /** + * Join an association, assigning an alias to the joined association. + *
+ * Functionally equivalent to {@link #createAlias(String, String, int)} using + * {@link #INNER_JOIN} for the joinType. + * + * @param associationPath A dot-separated property path + * @param alias The alias to assign to the joined association (for later reference). + * @return this (for method chaining) + */ + public Criteria createAlias(String associationPath, String alias) throws HibernateException; + + /** + * Join an association using the specified join-type, assigning an alias + * to the joined association. + *
+ * The joinType is expected to be one of {@link #INNER_JOIN} (the default), + * {@link #FULL_JOIN}, or {@link #LEFT_JOIN}. + * + * @param associationPath A dot-separated property path + * @param alias The alias to assign to the joined association (for later reference). + * @param joinType The type of join to use. + * @return this (for method chaining) + */ + public Criteria createAlias(String associationPath, String alias, int joinType) throws HibernateException; + + /** + * Create a new Criteria, "rooted" at the associated entity. + *
+ * Functionally equivalent to {@link #createCriteria(String, int)} using + * {@link #INNER_JOIN} for the joinType. + * + * @param associationPath A dot-separated property path + * @return the created "sub criteria" + */ + public Criteria createCriteria(String associationPath) throws HibernateException; + + /** + * Create a new Criteria, "rooted" at the associated entity, using the + * specified join type. + * + * @param associationPath A dot-separated property path + * @param joinType The type of join to use. + * @return the created "sub criteria" + */ + public Criteria createCriteria(String associationPath, int joinType) throws HibernateException; + + /** + * Create a new Criteria, "rooted" at the associated entity, + * assigning the given alias. + *
+ * Functionally equivalent to {@link #createCriteria(String, String, int)} using + * {@link #INNER_JOIN} for the joinType. + * + * @param associationPath A dot-separated property path + * @param alias The alias to assign to the joined association (for later reference). + * @return the created "sub criteria" + */ + public Criteria createCriteria(String associationPath, String alias) throws HibernateException; + + /** + * Create a new Criteria, "rooted" at the associated entity, + * assigning the given alias and using the specified join type. + * + * @param associationPath A dot-separated property path + * @param alias The alias to assign to the joined association (for later reference). + * @param joinType The type of join to use. + * @return the created "sub criteria" + */ + public Criteria createCriteria(String associationPath, String alias, int joinType) throws HibernateException; + + /** + * Set a strategy for handling the query results. This determines the + * "shape" of the query result. + * + * @param resultTransformer The transformer to apply + * @return this (for method chaining) + * + * @see #ROOT_ENTITY + * @see #DISTINCT_ROOT_ENTITY + * @see #ALIAS_TO_ENTITY_MAP + * @see #PROJECTION + */ + public Criteria setResultTransformer(ResultTransformer resultTransformer); + + /** + * Set a limit upon the number of objects to be retrieved. + * + * @param maxResults the maximum number of results + * @return this (for method chaining) + */ + public Criteria setMaxResults(int maxResults); + + /** + * Set the first result to be retrieved. + * + * @param firstResult the first result to retrieve, numbered from 0 + * @return this (for method chaining) + */ + public Criteria setFirstResult(int firstResult); + + /** + * Set a fetch size for the underlying JDBC query. + * + * @param fetchSize the fetch size + * @return this (for method chaining) + * + * @see java.sql.Statement#setFetchSize + */ + public Criteria setFetchSize(int fetchSize); + + /** + * Set a timeout for the underlying JDBC query. + * + * @param timeout The timeout value to apply. + * @return this (for method chaining) + * + * @see java.sql.Statement#setQueryTimeout + */ + public Criteria setTimeout(int timeout); + + /** + * Enable caching of this query result, provided query caching is enabled + * for the underlying session factory. + * + * @param cacheable Should the result be considered cacheable; default is + * to not cache (false). + * @return this (for method chaining) + */ + public Criteria setCacheable(boolean cacheable); + + /** + * Set the name of the cache region to use for query result caching. + * + * @param cacheRegion the name of a query cache region, or null + * for the default query cache + * @return this (for method chaining) + * + * @see #setCacheable + */ + public Criteria setCacheRegion(String cacheRegion); + + /** + * Add a comment to the generated SQL. + * + * @param comment a human-readable string + * @return this (for method chaining) + */ + public Criteria setComment(String comment); + + /** + * Override the flush mode for this particular query. + * + * @param flushMode The flush mode to use. + * @return this (for method chaining) + */ + public Criteria setFlushMode(FlushMode flushMode); + + /** + * Override the cache mode for this particular query. + * + * @param cacheMode The cache mode to use. + * @return this (for method chaining) + */ + public Criteria setCacheMode(CacheMode cacheMode); + + /** + * Get the results. + * + * @return The list of matched query results.
+ */ + public List list() throws HibernateException; + + /** + * Get the results as an instance of {@link ScrollableResults} + * + * @return The {@link ScrollableResults} representing the matched + * query results. + */ + public ScrollableResults scroll() throws HibernateException; + + /** + * Get the results as an instance of {@link ScrollableResults} based on the + * given scroll mode. + * + * @param scrollMode Indicates the type of underlying database cursor to + * request. + * @return The {@link ScrollableResults} representing the matched + * query results. + */ + public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException; + + /** + * Convenience method to return a single instance that matches + * the query, or null if the query returns no results. + * + * @return the single result or null + * @throws HibernateException if there is more than one matching result + */ + public Object uniqueResult() throws HibernateException; + +} \ No newline at end of file diff --git a/src/org/hibernate/DuplicateMappingException.java b/src/org/hibernate/DuplicateMappingException.java new file mode 100644 index 0000000000..ef936261d0 --- /dev/null +++ b/src/org/hibernate/DuplicateMappingException.java @@ -0,0 +1,32 @@ +package org.hibernate; + +/** + * Raised whenever a duplicate for a certain type occurs. + * Duplicate class, table, property name etc. + * + * @author Max Rydahl Andersen + * + */ +public class DuplicateMappingException extends MappingException { + + private final String name; + private final String type; + + public DuplicateMappingException(String customMessage, String type, String name) { + super(customMessage); + this.type=type; + this.name=name; + } + + public DuplicateMappingException(String type, String name) { + this("Duplicate " + type + " mapping " + name, type, name); + } + + public String getType() { + return type; + } + + public String getName() { + return name; + } +} diff --git a/src/org/hibernate/EmptyInterceptor.java b/src/org/hibernate/EmptyInterceptor.java new file mode 100755 index 0000000000..307d399523 --- /dev/null +++ b/src/org/hibernate/EmptyInterceptor.java @@ -0,0 +1,98 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.util.Iterator; + +import org.hibernate.type.Type; + +/** + * An interceptor that does nothing. May be used as a base class + * for application-defined custom interceptors. 
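Editor's note: as a concrete illustration of the subclassing approach just described, here is a minimal sketch of an interceptor that overrides a single callback. The "createdOn" property name is an assumption of the example, not part of this patch:

```java
import java.io.Serializable;
import java.util.Date;

import org.hibernate.EmptyInterceptor;
import org.hibernate.type.Type;

public class AuditInterceptor extends EmptyInterceptor {
    // Stamp a creation date on any entity that exposes a "createdOn" property.
    public boolean onSave(Object entity, Serializable id, Object[] state,
                          String[] propertyNames, Type[] types) {
        for ( int i = 0; i < propertyNames.length; i++ ) {
            if ( "createdOn".equals( propertyNames[i] ) && state[i] == null ) {
                state[i] = new Date();
                return true; // tell Hibernate the state array was modified
            }
        }
        return false;
    }
}
```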
+ * + * @author Gavin King + */ +public class EmptyInterceptor implements Interceptor, Serializable { + + public static final Interceptor INSTANCE = new EmptyInterceptor(); + + protected EmptyInterceptor() {} + + public void onDelete( + Object entity, + Serializable id, + Object[] state, + String[] propertyNames, + Type[] types) {} + + public boolean onFlushDirty( + Object entity, + Serializable id, + Object[] currentState, + Object[] previousState, + String[] propertyNames, + Type[] types) { + return false; + } + + public boolean onLoad( + Object entity, + Serializable id, + Object[] state, + String[] propertyNames, + Type[] types) { + return false; + } + + public boolean onSave( + Object entity, + Serializable id, + Object[] state, + String[] propertyNames, + Type[] types) { + return false; + } + + public void postFlush(Iterator entities) {} + public void preFlush(Iterator entities) {} + + public Boolean isTransient(Object entity) { + return null; + } + + public Object instantiate(String entityName, EntityMode entityMode, Serializable id) { + return null; + } + + public int[] findDirty(Object entity, + Serializable id, + Object[] currentState, + Object[] previousState, + String[] propertyNames, + Type[] types) { + return null; + } + + public String getEntityName(Object object) { + return null; + } + + public Object getEntity(String entityName, Serializable id) { + return null; + } + + public void afterTransactionBegin(Transaction tx) {} + public void afterTransactionCompletion(Transaction tx) {} + public void beforeTransactionCompletion(Transaction tx) {} + + public String onPrepareStatement(String sql) { + return sql; + } + + public void onCollectionRemove(Object collection, Serializable key) throws CallbackException {} + + public void onCollectionRecreate(Object collection, Serializable key) throws CallbackException {} + + public void onCollectionUpdate(Object collection, Serializable key) throws CallbackException {} + +} \ No newline at end of file diff --git a/src/org/hibernate/EntityMode.java b/src/org/hibernate/EntityMode.java new file mode 100644 index 0000000000..b1d27e4ab4 --- /dev/null +++ b/src/org/hibernate/EntityMode.java @@ -0,0 +1,49 @@ +// $Id$ +package org.hibernate; + +import java.util.Map; +import java.util.HashMap; +import java.io.Serializable; + +/** + * Defines the representation modes available for entities. 
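Editor's note: a hedged sketch of what switching representation modes can look like. Session.getSession(EntityMode) belongs to the wider Session API rather than this patch, and the Cat mapping and catId variable are assumptions:

```java
// Work with the same persistent data as a tree of Maps instead of POJOs.
Session mapSession = session.getSession( EntityMode.MAP );
Map cat = (Map) mapSession.get( "Cat", catId );
cat.put( "name", "Pico" );   // property access becomes Map access
```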
+ * + * @author Steve Ebersole + */ +public class EntityMode implements Serializable { + + private static final Map INSTANCES = new HashMap(); + + public static final EntityMode POJO = new EntityMode( "pojo" ); + public static final EntityMode DOM4J = new EntityMode( "dom4j" ); + public static final EntityMode MAP = new EntityMode( "dynamic-map" ); + + static { + INSTANCES.put( POJO.name, POJO ); + INSTANCES.put( DOM4J.name, DOM4J ); + INSTANCES.put( MAP.name, MAP ); + } + + private final String name; + + public EntityMode(String name) { + this.name = name; + } + + public String toString() { + return name; + } + + private Object readResolve() { + return INSTANCES.get( name ); + } + + public static EntityMode parse(String name) { + EntityMode rtn = ( EntityMode ) INSTANCES.get( name ); + if ( rtn == null ) { + // default is POJO + rtn = POJO; + } + return rtn; + } +} diff --git a/src/org/hibernate/FetchMode.java b/src/org/hibernate/FetchMode.java new file mode 100644 index 0000000000..ae990bf5ac --- /dev/null +++ b/src/org/hibernate/FetchMode.java @@ -0,0 +1,70 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; + +/** + * Represents an association fetching strategy. This is used + * together with the Criteria API to specify runtime + * fetching strategies.
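Editor's note: for example, the setFetchMode() call referenced in the @see tag below lets a single query join-fetch a collection that is mapped lazy (Cat and its kittens collection assumed, as in the Criteria examples):

```java
// Fetch the "kittens" collection in the same SQL join, for this query only.
List cats = session.createCriteria(Cat.class)
    .add( Restrictions.like("name", "F%") )
    .setFetchMode( "kittens", FetchMode.JOIN )
    .list();
```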
+ *
+ * For HQL queries, use the FETCH keyword instead. + * + * @see Criteria#setFetchMode(java.lang.String, FetchMode) + * @author Gavin King + */ +public final class FetchMode implements Serializable { + private final String name; + private static final Map INSTANCES = new HashMap(); + + private FetchMode(String name) { + this.name=name; + } + public String toString() { + return name; + } + /** + * Default to the setting configured in the mapping file. + */ + public static final FetchMode DEFAULT = new FetchMode("DEFAULT"); + + /** + * Fetch using an outer join. Equivalent to fetch="join". + */ + public static final FetchMode JOIN = new FetchMode("JOIN"); + /** + * Fetch eagerly, using a separate select. Equivalent to + * fetch="select". + */ + public static final FetchMode SELECT = new FetchMode("SELECT"); + + /** + * Fetch lazily. Equivalent to outer-join="false". + * @deprecated use FetchMode.SELECT + */ + public static final FetchMode LAZY = SELECT; + /** + * Fetch eagerly, using an outer join. Equivalent to + * outer-join="true". + * @deprecated use FetchMode.JOIN + */ + public static final FetchMode EAGER = JOIN; + + static { + INSTANCES.put( JOIN.name, JOIN ); + INSTANCES.put( SELECT.name, SELECT ); + INSTANCES.put( DEFAULT.name, DEFAULT ); + } + + private Object readResolve() { + return INSTANCES.get(name); + } + +} + + + + + diff --git a/src/org/hibernate/Filter.java b/src/org/hibernate/Filter.java new file mode 100644 index 0000000000..692ef2b5f9 --- /dev/null +++ b/src/org/hibernate/Filter.java @@ -0,0 +1,68 @@ +// $Id$ +package org.hibernate; + +import org.hibernate.engine.FilterDefinition; + +import java.util.Collection; + +/** + * Type definition of Filter. Filter defines the user's view into enabled dynamic filters, + * allowing them to set filter parameter values. + * + * @author Steve Ebersole + */ +public interface Filter { + + /** + * Get the name of this filter. + * + * @return This filter's name. + */ + public String getName(); + + /** + * Get the filter definition containing additional information about the + * filter (such as default-condition and expected parameter names/types). + * + * @return The filter definition + */ + public FilterDefinition getFilterDefinition(); + + + /** + * Set the named parameter's value for this filter. + * + * @param name The parameter's name. + * @param value The value to be applied. + * @return This FilterImpl instance (for method chaining). + */ + public Filter setParameter(String name, Object value); + + /** + * Set the named parameter's value list for this filter. Used + * in conjunction with IN-style filter criteria. + * + * @param name The parameter's name. + * @param values The values to be expanded into an SQL IN list. + * @return This FilterImpl instance (for method chaining). + */ + public Filter setParameterList(String name, Collection values); + + /** + * Set the named parameter's value list for this filter. Used + * in conjunction with IN-style filter criteria. + * + * @param name The parameter's name. + * @param values The values to be expanded into an SQL IN list. + * @return This FilterImpl instance (for method chaining). + */ + public Filter setParameterList(String name, Object[] values); + + /** + * Perform validation of the filter state. This is used to verify the + * state of the filter after its enablement and before its use. + * + * @throws HibernateException If the state is not currently valid. 
+ */ + public void validate() throws HibernateException; +} diff --git a/src/org/hibernate/FlushMode.java b/src/org/hibernate/FlushMode.java new file mode 100644 index 0000000000..3520eaa0fd --- /dev/null +++ b/src/org/hibernate/FlushMode.java @@ -0,0 +1,92 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; + +/** + * Represents a flushing strategy. The flush process synchronizes + * database state with session state by detecting state changes + * and executing SQL statements. + * + * @see Session#setFlushMode(FlushMode) + * @see Query#setFlushMode(FlushMode) + * @see Criteria#setFlushMode(FlushMode) + * + * @author Gavin King + */ +public final class FlushMode implements Serializable { + private static final Map INSTANCES = new HashMap(); + + private final int level; + private final String name; + + private FlushMode(int level, String name) { + this.level = level; + this.name = name; + } + + public String toString() { + return name; + } + + /** + * The {@link Session} is never flushed unless {@link Session#flush} + * is explicitly called by the application. This mode is very + * efficient for read only transactions. + * + * @deprecated use {@link #MANUAL} instead. + */ + public static final FlushMode NEVER = new FlushMode( 0, "NEVER" ); + + /** + * The {@link Session} is only ever flushed when {@link Session#flush} + * is explicitly called by the application. This mode is very + * efficient for read only transactions. + */ + public static final FlushMode MANUAL = new FlushMode( 0, "MANUAL" ); + + /** + * The {@link Session} is flushed when {@link Transaction#commit} + * is called. + */ + public static final FlushMode COMMIT = new FlushMode(5, "COMMIT"); + + /** + * The {@link Session} is sometimes flushed before query execution + * in order to ensure that queries never return stale state. This + * is the default flush mode. + */ + public static final FlushMode AUTO = new FlushMode(10, "AUTO"); + + /** + * The {@link Session} is flushed before every query. This is + * almost always unnecessary and inefficient. + */ + public static final FlushMode ALWAYS = new FlushMode(20, "ALWAYS"); + + public boolean lessThan(FlushMode other) { + return this.level < other.level; + }
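Editor's note: a brief sketch of how the level-based ordering above is meant to be used; session is an assumed open Session, and getFlushMode() comes from the wider Session API. Because every mode is canonicalized through readResolve(), such comparisons stay safe even after serialization:

```java
// MANUAL/NEVER (level 0) < COMMIT (5) < AUTO (10) < ALWAYS (20)
if ( session.getFlushMode().lessThan( FlushMode.COMMIT ) ) {
    // the session will not flush on its own before commit,
    // so flush explicitly before relying on query results
    session.flush();
}
```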
  • Provides access to the full range of Hibernate built-in types. Type + * instances may be used to bind values to query parameters.
  • A factory for new Blobs and Clobs.
  • Defines static methods for manipulation of proxies. + * + * + * @author Gavin King + * @see java.sql.Clob + * @see java.sql.Blob + * @see org.hibernate.type.Type + */ + +public final class Hibernate { + + /** + * Hibernate long type. + */ + public static final NullableType LONG = new LongType(); + /** + * Hibernate short type. + */ + public static final NullableType SHORT = new ShortType(); + /** + * Hibernate integer type. + */ + public static final NullableType INTEGER = new IntegerType(); + /** + * Hibernate byte type. + */ + public static final NullableType BYTE = new ByteType(); + /** + * Hibernate float type. + */ + public static final NullableType FLOAT = new FloatType(); + /** + * Hibernate double type. + */ + public static final NullableType DOUBLE = new DoubleType(); + /** + * Hibernate character type. + */ + public static final NullableType CHARACTER = new CharacterType(); + /** + * Hibernate string type. + */ + public static final NullableType STRING = new StringType(); + /** + * Hibernate time type. + */ + public static final NullableType TIME = new TimeType(); + /** + * Hibernate date type. + */ + public static final NullableType DATE = new DateType(); + /** + * Hibernate timestamp type. + */ + public static final NullableType TIMESTAMP = new TimestampType(); + /** + * Hibernate boolean type. + */ + public static final NullableType BOOLEAN = new BooleanType(); + /** + * Hibernate true_false type. + */ + public static final NullableType TRUE_FALSE = new TrueFalseType(); + /** + * Hibernate yes_no type. + */ + public static final NullableType YES_NO = new YesNoType(); + /** + * Hibernate big_decimal type. + */ + public static final NullableType BIG_DECIMAL = new BigDecimalType(); + /** + * Hibernate big_integer type. + */ + public static final NullableType BIG_INTEGER = new BigIntegerType(); + /** + * Hibernate binary type. + */ + public static final NullableType BINARY = new BinaryType(); + /** + * Hibernate wrapper-binary type. + */ + public static final NullableType WRAPPER_BINARY = new WrapperBinaryType(); + /** + * Hibernate char[] type. + */ + public static final NullableType CHAR_ARRAY = new CharArrayType(); + /** + * Hibernate Character[] type. + */ + public static final NullableType CHARACTER_ARRAY = new CharacterArrayType(); + /** + * Hibernate text type. + */ + public static final NullableType TEXT = new TextType(); + /** + * Hibernate blob type. + */ + public static final Type BLOB = new BlobType(); + /** + * Hibernate clob type. + */ + public static final Type CLOB = new ClobType(); + /** + * Hibernate calendar type. + */ + public static final NullableType CALENDAR = new CalendarType(); + /** + * Hibernate calendar_date type. + */ + public static final NullableType CALENDAR_DATE = new CalendarDateType(); + /** + * Hibernate locale type. + */ + public static final NullableType LOCALE = new LocaleType(); + /** + * Hibernate currency type. + */ + public static final NullableType CURRENCY = new CurrencyType(); + /** + * Hibernate timezone type. + */ + public static final NullableType TIMEZONE = new TimeZoneType(); + /** + * Hibernate class type. + */ + public static final NullableType CLASS = new ClassType(); + /** + * Hibernate serializable type. + */ + public static final NullableType SERIALIZABLE = new SerializableType( Serializable.class ); + /** + * Hibernate object type. + */ + public static final Type OBJECT = new AnyType(); + + + /** + * Cannot be instantiated. 
+ */ + private Hibernate() { + throw new UnsupportedOperationException(); + } + + /** + * A Hibernate serializable type. + */ + public static Type serializable(Class serializableClass) { + return new SerializableType( serializableClass ); + } + + /** + * A Hibernate any type. + * + * @param metaType a type mapping java.lang.Class to a single column + * @param identifierType the entity identifier type + * @return the Type + */ + public static Type any(Type metaType, Type identifierType) { + return new AnyType( metaType, identifierType ); + } + + /** + * A Hibernate persistent object (entity) type. + * + * @param persistentClass a mapped entity class + */ + public static Type entity(Class persistentClass) { + // not really a many-to-one association *necessarily* + return new ManyToOneType( persistentClass.getName() ); + } + + /** + * A Hibernate persistent object (entity) type. + * + * @param entityName a mapped entity class + */ + public static Type entity(String entityName) { + // not really a many-to-one association *necessarily* + return new ManyToOneType( entityName ); + } + + /** + * A Hibernate custom type. + * + * @param userTypeClass a class that implements UserType + */ + public static Type custom(Class userTypeClass) throws HibernateException { + return custom( userTypeClass, null ); + } + + /** + * A Hibernate parameterizable custom type. + * + * @param userTypeClass a class that implements UserType and ParameterizableType + * @param parameterNames the names of the parameters passed to the type + * @param parameterValues the values of the parameters passed to the type. They must match + * up with the order and length of the parameterNames array. + */ + public static Type custom(Class userTypeClass, String[] parameterNames, String[] parameterValues) + throws HibernateException { + Properties parameters = new Properties(); + for ( int i = 0; i < parameterNames.length; i++ ) { + parameters.setProperty( parameterNames[i], parameterValues[i] ); + } + return custom( userTypeClass, parameters ); + } + + /** + * A Hibernate parameterizable custom type. + * + * @param userTypeClass a class that implements UserType and ParameterizableType + * @param parameters the parameters as a collection of name/value pairs + */ + public static Type custom(Class userTypeClass, Properties parameters) + throws HibernateException { + if ( CompositeUserType.class.isAssignableFrom( userTypeClass ) ) { + CompositeCustomType type = new CompositeCustomType( userTypeClass, parameters ); + return type; + } + else { + CustomType type = new CustomType( userTypeClass, parameters ); + return type; + } + } + + /** + * Force initialization of a proxy or persistent collection. + *
+ * Note: This only ensures initialization of a proxy object or collection; + * it is not guaranteed that the elements INSIDE the collection will be initialized/materialized. + * + * @param proxy a persistable object, proxy, persistent collection or null + * @throws HibernateException if we can't initialize the proxy at this time, eg. the Session was closed + */ + public static void initialize(Object proxy) throws HibernateException { + if ( proxy == null ) { + return; + } + else if ( proxy instanceof HibernateProxy ) { + ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().initialize(); + } + else if ( proxy instanceof PersistentCollection ) { + ( ( PersistentCollection ) proxy ).forceInitialization(); + } + } + + /** + * Check if the proxy or persistent collection is initialized. + * + * @param proxy a persistable object, proxy, persistent collection or null + * @return true if the argument is already initialized, or is not a proxy or collection + */ + public static boolean isInitialized(Object proxy) { + if ( proxy instanceof HibernateProxy ) { + return !( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().isUninitialized(); + } + else if ( proxy instanceof PersistentCollection ) { + return ( ( PersistentCollection ) proxy ).wasInitialized(); + } + else { + return true; + } + } + + /** + * Get the true, underlying class of a proxied persistent class. This operation + * will initialize a proxy by side-effect. + * + * @param proxy a persistable object or proxy + * @return the true class of the instance + * @throws HibernateException + */ + public static Class getClass(Object proxy) { + if ( proxy instanceof HibernateProxy ) { + return ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer() + .getImplementation() + .getClass(); + } + else { + return proxy.getClass(); + } + } + + /** + * Create a new Blob. The returned object will be initially immutable. + * + * @param bytes a byte array + * @return the Blob + */ + public static Blob createBlob(byte[] bytes) { + return new SerializableBlob( new BlobImpl( bytes ) ); + } + + /** + * Create a new Blob. The returned object will be initially immutable. + * + * @param stream a binary stream + * @param length the number of bytes in the stream + * @return the Blob + */ + public static Blob createBlob(InputStream stream, int length) { + return new SerializableBlob( new BlobImpl( stream, length ) ); + } + + /** + * Create a new Blob. The returned object will be initially immutable. + * + * @param stream a binary stream + * @return the Blob + * @throws IOException + */ + public static Blob createBlob(InputStream stream) throws IOException { + return new SerializableBlob( new BlobImpl( stream, stream.available() ) ); + } + + /** + * Create a new Clob. The returned object will be initially immutable. + * + * @param string a String + */ + public static Clob createClob(String string) { + return new SerializableClob( new ClobImpl( string ) ); + } + + /** + * Create a new Clob. The returned object will be initially immutable. + * + * @param reader a character stream + * @param length the number of characters in the stream + */ + public static Clob createClob(Reader reader, int length) { + return new SerializableClob( new ClobImpl( reader, length ) ); + } + + /** + * Close an Iterator created by iterate() immediately, + * instead of waiting until the session is closed or disconnected.
+ * + * @param iterator an Iterator created by iterate() + * @throws HibernateException + * @see org.hibernate.Query#iterate + * @see Query#iterate() + */ + public static void close(Iterator iterator) throws HibernateException { + if ( iterator instanceof HibernateIterator ) { + ( ( HibernateIterator ) iterator ).close(); + } + else { + throw new IllegalArgumentException( "not a Hibernate iterator" ); + } + } + + /** + * Check if the property is initialized. If the named property does not exist + * or is not persistent, this method always returns true. + * + * @param proxy The potential proxy + * @param propertyName the name of a persistent attribute of the object + * @return true if the named property of the object is not listed as uninitialized + * @return false if the object is an uninitialized proxy, or the named property is uninitialized + */ + public static boolean isPropertyInitialized(Object proxy, String propertyName) { + + Object entity; + if ( proxy instanceof HibernateProxy ) { + LazyInitializer li = ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer(); + if ( li.isUninitialized() ) { + return false; + } + else { + entity = li.getImplementation(); + } + } + else { + entity = proxy; + } + + if ( FieldInterceptionHelper.isInstrumented( entity ) ) { + FieldInterceptor interceptor = FieldInterceptionHelper.extractFieldInterceptor( entity ); + return interceptor == null || interceptor.isInitialized( propertyName ); + } + else { + return true; + } + + } + +} diff --git a/src/org/hibernate/HibernateException.java b/src/org/hibernate/HibernateException.java new file mode 100644 index 0000000000..c9f566b775 --- /dev/null +++ b/src/org/hibernate/HibernateException.java @@ -0,0 +1,34 @@ +//$Id$ +package org.hibernate; + +import org.hibernate.exception.NestableRuntimeException; + +/** + * Any exception that occurs inside the persistence layer + * or JDBC driver. SQLExceptions are always wrapped + * by instances of JDBCException. + * + * @see JDBCException + * @author Gavin King + */ + +public class HibernateException extends NestableRuntimeException { + + public HibernateException(Throwable root) { + super(root); + } + + public HibernateException(String string, Throwable root) { + super(string, root); + } + + public HibernateException(String s) { + super(s); + } +} + + + + + + diff --git a/src/org/hibernate/InstantiationException.java b/src/org/hibernate/InstantiationException.java new file mode 100644 index 0000000000..bf8c8d4743 --- /dev/null +++ b/src/org/hibernate/InstantiationException.java @@ -0,0 +1,44 @@ +//$Id$ +package org.hibernate; + +/** + * Thrown if Hibernate can't instantiate an entity or component + * class at runtime. 
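Editor's note: looking back at the proxy utilities above (initialize(), isInitialized(), isPropertyInitialized()), a hedged sketch of defensive initialization before a session closes; Cat, its lazy kittens collection, and catId are assumptions:

```java
Cat cat = (Cat) session.get( Cat.class, catId );
// Materialize the lazy collection while the session is still open.
if ( !Hibernate.isInitialized( cat.getKittens() ) ) {
    Hibernate.initialize( cat.getKittens() );
}
session.close();
cat.getKittens().size(); // safe: the collection was initialized above
```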
+ * + * @author Gavin King + */ + +public class InstantiationException extends HibernateException { + + private final Class clazz; + + public InstantiationException(String s, Class clazz, Throwable root) { + super(s, root); + this.clazz = clazz; + } + + public InstantiationException(String s, Class clazz) { + super(s); + this.clazz = clazz; + } + + public InstantiationException(String s, Class clazz, Exception e) { + super(s, e); + this.clazz = clazz; + } + + public Class getPersistentClass() { + return clazz; + } + + public String getMessage() { + return super.getMessage() + clazz.getName(); + } + +} + + + + + + diff --git a/src/org/hibernate/Interceptor.java b/src/org/hibernate/Interceptor.java new file mode 100644 index 0000000000..844db2ea5a --- /dev/null +++ b/src/org/hibernate/Interceptor.java @@ -0,0 +1,155 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.util.Iterator; + +import org.hibernate.type.Type; + +/** + * Allows user code to inspect and/or change property values. + *
+ * Inspection occurs before property values are written and after they are read + * from the database.
+ *
+ * There might be a single instance of Interceptor for a SessionFactory, or a new instance + * might be specified for each Session. Whichever approach is used, the interceptor must be + * serializable if the Session is to be serializable. This means that SessionFactory-scoped + * interceptors should implement readResolve().
+ *
+ * The Session may not be invoked from a callback (nor may a callback cause a collection or proxy to + * be lazily initialized).
+ *
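Editor's note: the two scopes just described look roughly like this in practice. Configuration.setInterceptor() and SessionFactory.openSession(Interceptor) are the entry points named in the @see tags below, and AuditInterceptor is the hypothetical subclass sketched earlier:

```java
// SessionFactory-scoped: one shared, serializable instance for every session.
Configuration cfg = new Configuration();
cfg.setInterceptor( new AuditInterceptor() );

// Session-scoped: a fresh instance per session.
Session session = sessionFactory.openSession( new AuditInterceptor() );
```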
    + * Instead of implementing this interface directly, it is usually better to extend EmptyInterceptor + * and override only the callback methods of interest. + * + * @see SessionFactory#openSession(Interceptor) + * @see org.hibernate.cfg.Configuration#setInterceptor(Interceptor) + * @see EmptyInterceptor + * @author Gavin King + */ +public interface Interceptor { + /** + * Called just before an object is initialized. The interceptor may change the state, which will + * be propagated to the persistent object. Note that when this method is called, entity will be + * an empty uninitialized instance of the class. + * + * @return true if the user modified the state in any way. + */ + public boolean onLoad(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException; + /** + * Called when an object is detected to be dirty, during a flush. The interceptor may modify the detected + * currentState, which will be propagated to both the database and the persistent object. + * Note that not all flushes end in actual synchronization with the database, in which case the + * new currentState will be propagated to the object, but not necessarily (immediately) to + * the database. It is strongly recommended that the interceptor not modify the previousState. + * + * @return true if the user modified the currentState in any way. + */ + public boolean onFlushDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types) throws CallbackException; + /** + * Called before an object is saved. The interceptor may modify the state, which will be used for + * the SQL INSERT and propagated to the persistent object. + * + * @return true if the user modified the state in any way. + */ + public boolean onSave(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException; + /** + * Called before an object is deleted. It is not recommended that the interceptor modify the state. + */ + public void onDelete(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException; + /** + * Called before a collection is (re)created. + */ + public void onCollectionRecreate(Object collection, Serializable key) throws CallbackException; + /** + * Called before a collection is deleted. + */ + public void onCollectionRemove(Object collection, Serializable key) throws CallbackException; + /** + * Called before a collection is updated. + */ + public void onCollectionUpdate(Object collection, Serializable key) throws CallbackException; + /** + * Called before a flush + */ + public void preFlush(Iterator entities) throws CallbackException; + /** + * Called after a flush that actually ends in execution of the SQL statements required to synchronize + * in-memory state with the database. + */ + public void postFlush(Iterator entities) throws CallbackException; + /** + * Called to distinguish between transient and detached entities. The return value determines the + * state of the entity with respect to the current session. + *
    + * @param entity a transient or detached entity + * @return Boolean or null to choose default behaviour + */ + public Boolean isTransient(Object entity); + /** + * Called from flush(). The return value determines whether the entity is updated + * + * @param entity a persistent entity + * @return array of dirty property indices or null to choose default behaviour + */ + public int[] findDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types); + /** + * Instantiate the entity class. Return null to indicate that Hibernate should use + * the default constructor of the class. The identifier property of the returned instance + * should be initialized with the given identifier. + * + * @param entityName the name of the entity + * @param entityMode The type of entity instance to be returned. + * @param id the identifier of the new instance + * @return an instance of the class, or null to choose default behaviour + */ + public Object instantiate(String entityName, EntityMode entityMode, Serializable id) throws CallbackException; + + /** + * Get the entity name for a persistent or transient instance + * @param object an entity instance + * @return the name of the entity + */ + public String getEntityName(Object object) throws CallbackException; + + /** + * Get a fully loaded entity instance that is cached externally + * @param entityName the name of the entity + * @param id the instance identifier + * @return a fully initialized entity + * @throws CallbackException + */ + public Object getEntity(String entityName, Serializable id) throws CallbackException; + + /** + * Called when a Hibernate transaction is begun via the Hibernate Transaction + * API. Will not be called if transactions are being controlled via some other + * mechanism (CMT, for example). + */ + public void afterTransactionBegin(Transaction tx); + /** + * Called before a transaction is committed (but not before rollback). + */ + public void beforeTransactionCompletion(Transaction tx); + /** + * Called after a transaction is committed or rolled back. + */ + public void afterTransactionCompletion(Transaction tx); + + /** + * Called when sql string is being prepared. + * @param sql sql to be prepared + * @return original or modified sql + */ + public String onPrepareStatement(String sql); +} diff --git a/src/org/hibernate/InvalidMappingException.java b/src/org/hibernate/InvalidMappingException.java new file mode 100644 index 0000000000..23e6eef967 --- /dev/null +++ b/src/org/hibernate/InvalidMappingException.java @@ -0,0 +1,42 @@ +package org.hibernate; + +/** + * Thrown when a mapping is found to be invalid. + * Similar to MappingException, but this contains more info about the path and type of mapping (e.g. 
file, resource or url) + * + * @author Max Rydahl Andersen + * + */ +public class InvalidMappingException extends MappingException { + + private final String path; + private final String type; + + public InvalidMappingException(String customMessage, String type, String path, Throwable cause) { + super(customMessage, cause); + this.type=type; + this.path=path; + } + + public InvalidMappingException(String customMessage, String type, String path) { + super(customMessage); + this.type=type; + this.path=path; + } + + public InvalidMappingException(String type, String path) { + this("Could not parse mapping document from " + type + (path==null?"":" " + path), type, path); + } + + public InvalidMappingException(String type, String path, Throwable cause) { + this("Could not parse mapping document from " + type + (path==null?"":" " + path), type, path, cause); + } + + public String getType() { + return type; + } + + public String getPath() { + return path; + } +} diff --git a/src/org/hibernate/JDBCException.java b/src/org/hibernate/JDBCException.java new file mode 100644 index 0000000000..c258c975d7 --- /dev/null +++ b/src/org/hibernate/JDBCException.java @@ -0,0 +1,63 @@ +//$Id$ +package org.hibernate; + +import java.sql.SQLException; + + +/** + * Wraps an SQLException. Indicates that an exception + * occurred during a JDBC call. + * + * @see java.sql.SQLException + * @author Gavin King + */ +public class JDBCException extends HibernateException { + + private SQLException sqle; + private String sql; + + public JDBCException(String string, SQLException root) { + super(string, root); + sqle=root; + } + + public JDBCException(String string, SQLException root, String sql) { + this(string, root); + this.sql = sql; + } + + /** + * Get the SQLState of the underlying SQLException. + * @see java.sql.SQLException + * @return String + */ + public String getSQLState() { + return sqle.getSQLState(); + } + + /** + * Get the errorCode of the underlying SQLException. + * @see java.sql.SQLException + * @return int the error code + */ + public int getErrorCode() { + return sqle.getErrorCode(); + } + + /** + * Get the underlying SQLException. + * @return SQLException + */ + public SQLException getSQLException() { + return sqle; + } + + /** + * Get the actual SQL statement that caused the exception + * (may be null) + */ + public String getSQL() { + return sql; + } + +} diff --git a/src/org/hibernate/LazyInitializationException.java b/src/org/hibernate/LazyInitializationException.java new file mode 100644 index 0000000000..87dfea4314 --- /dev/null +++ b/src/org/hibernate/LazyInitializationException.java @@ -0,0 +1,28 @@ +//$Id$ +package org.hibernate; + +import org.apache.commons.logging.LogFactory; + +/** + * Indicates access to unfetched data outside of a session context. + * For example, when an uninitialized proxy or collection is accessed + * after the session was closed. 
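Editor's note: the canonical way to trigger (and avoid) this exception, with an assumed lazy kittens collection on Cat:

```java
Session session = sessionFactory.openSession();
Cat cat = (Cat) session.get( Cat.class, catId );
session.close();

cat.getKittens().size(); // throws LazyInitializationException: no session

// Avoid it by initializing before the session closes:
//   Hibernate.initialize( cat.getKittens() );
```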
+ * + * @see Hibernate#initialize(java.lang.Object) + * @see Hibernate#isInitialized(java.lang.Object) + * @author Gavin King + */ +public class LazyInitializationException extends HibernateException { + + public LazyInitializationException(String msg) { + super(msg); + LogFactory.getLog(LazyInitializationException.class).error(msg, this); + } + +} + + + + + + diff --git a/src/org/hibernate/LockMode.java b/src/org/hibernate/LockMode.java new file mode 100644 index 0000000000..b144e6ec6c --- /dev/null +++ b/src/org/hibernate/LockMode.java @@ -0,0 +1,106 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; + +/** + * Instances represent a lock mode for a row of a relational + * database table. It is not intended that users spend much + * time worrying about locking since Hibernate usually + * obtains exactly the right lock level automatically. + * Some "advanced" users may wish to explicitly specify lock + * levels. + * + * @see Session#lock(Object,LockMode) + * @author Gavin King + */ +public final class LockMode implements Serializable { + private final int level; + private final String name; + private static final Map INSTANCES = new HashMap(); + + private LockMode(int level, String name) { + this.level=level; + this.name=name; + } + public String toString() { + return name; + } + /** + * Check if this lock mode is more restrictive than the given lock mode. + * + * @param mode LockMode to check + * @return true if this lock mode is more restrictive than given lock mode + */ + public boolean greaterThan(LockMode mode) { + return level > mode.level; + } + /** + * Check if this lock mode is less restrictive than the given lock mode. + * + * @param mode LockMode to check + * @return true if this lock mode is less restrictive than given lock mode + */ + public boolean lessThan(LockMode mode) { + return level < mode.level; + } + /** + * No lock required. If an object is requested with this lock + * mode, a READ lock will be obtained if it is + * necessary to actually read the state from the database, + * rather than pull it from a cache.
+ *
+ * This is the "default" lock mode. + */ + public static final LockMode NONE = new LockMode(0, "NONE"); + /** + * A shared lock. Objects in this lock mode were read from + * the database in the current transaction, rather than being + * pulled from a cache. + */ + public static final LockMode READ = new LockMode(5, "READ"); + /** + * An upgrade lock. Objects loaded in this lock mode are + * materialized using an SQL select ... for update. + */ + public static final LockMode UPGRADE = new LockMode(10, "UPGRADE"); + /** + * Attempt to obtain an upgrade lock, using an Oracle-style + * select for update nowait. The semantics of + * this lock mode, once obtained, are the same as + * UPGRADE. + */ + public static final LockMode UPGRADE_NOWAIT = new LockMode(10, "UPGRADE_NOWAIT"); + /** + * A WRITE lock is obtained when an object is updated + * or inserted. This lock mode is for internal use only and is + * not a valid mode for load() or lock() (both + * of which throw exceptions if WRITE is specified). + */ + public static final LockMode WRITE = new LockMode(10, "WRITE"); + + /** + * Similar to {@link #UPGRADE} except that, for versioned entities, + * it results in a forced version increment. + */ + public static final LockMode FORCE = new LockMode( 15, "FORCE" ); + + static { + INSTANCES.put( NONE.name, NONE ); + INSTANCES.put( READ.name, READ ); + INSTANCES.put( UPGRADE.name, UPGRADE ); + INSTANCES.put( UPGRADE_NOWAIT.name, UPGRADE_NOWAIT ); + INSTANCES.put( WRITE.name, WRITE ); + INSTANCES.put( FORCE.name, FORCE ); + } + + private Object readResolve() { + return parse( name ); + } + + public static LockMode parse(String name) { + return ( LockMode ) INSTANCES.get(name); + } +} diff --git a/src/org/hibernate/MappingException.java b/src/org/hibernate/MappingException.java new file mode 100644 index 0000000000..ce00409584 --- /dev/null +++ b/src/org/hibernate/MappingException.java @@ -0,0 +1,31 @@ +//$Id$ +package org.hibernate; + +/** + * An exception that usually occurs at configuration time, rather + * than runtime, as a result of something screwy in the O-R mappings. + * + * @author Gavin King + */ + +public class MappingException extends HibernateException { + + public MappingException(String msg, Throwable root) { + super( msg, root ); + } + + public MappingException(Throwable root) { + super(root); + } + + public MappingException(String s) { + super(s); + } + +} + + + + + + diff --git a/src/org/hibernate/MappingNotFoundException.java b/src/org/hibernate/MappingNotFoundException.java new file mode 100644 index 0000000000..f5701098b5 --- /dev/null +++ b/src/org/hibernate/MappingNotFoundException.java @@ -0,0 +1,41 @@ +package org.hibernate; + +/** + * Thrown when a resource for a mapping could not be found.
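Editor's note, stepping back to LockMode defined above: the two usual entry points are load() with an explicit mode and lock() on an instance already in the session (Cat and catId assumed):

```java
// Acquire the row lock at load time (SELECT ... FOR UPDATE where supported).
Cat cat = (Cat) session.load( Cat.class, catId, LockMode.UPGRADE );

// Or escalate the lock on an already-associated instance, failing fast
// on databases that support FOR UPDATE NOWAIT.
session.lock( cat, LockMode.UPGRADE_NOWAIT );
```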
+ * + * @author Max Rydahl Andersen + * + */ +public class MappingNotFoundException extends MappingException { + + private final String path; + private final String type; + + public MappingNotFoundException(String customMessage, String type, String path, Throwable cause) { + super(customMessage, cause); + this.type=type; + this.path=path; + } + + public MappingNotFoundException(String customMessage, String type, String path) { + super(customMessage); + this.type=type; + this.path=path; + } + + public MappingNotFoundException(String type, String path) { + this(type + ": " + path + " not found", type, path); + } + + public MappingNotFoundException(String type, String path, Throwable cause) { + this(type + ": " + path + " not found", type, path, cause); + } + + public String getType() { + return type; + } + + public String getPath() { + return path; + } +} diff --git a/src/org/hibernate/NonUniqueObjectException.java b/src/org/hibernate/NonUniqueObjectException.java new file mode 100644 index 0000000000..825b2e45fa --- /dev/null +++ b/src/org/hibernate/NonUniqueObjectException.java @@ -0,0 +1,44 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; + +import org.hibernate.pretty.MessageHelper; + +/** + * This exception is thrown when an operation would + * break session-scoped identity. This occurs if the + * user tries to associate two different instances of + * the same Java class with a particular identifier, + * in the scope of a single Session. + * + * @author Gavin King + */ +public class NonUniqueObjectException extends HibernateException { + private final Serializable identifier; + private final String entityName; + + public NonUniqueObjectException(String message, Serializable id, String clazz) { + super(message); + this.entityName = clazz; + this.identifier = id; + } + + public NonUniqueObjectException(Serializable id, String clazz) { + this("a different object with the same identifier value was already associated with the session", id, clazz); + } + + public Serializable getIdentifier() { + return identifier; + } + + public String getMessage() { + return super.getMessage() + ": " + + MessageHelper.infoString(entityName, identifier); + } + + public String getEntityName() { + return entityName; + } + +} diff --git a/src/org/hibernate/NonUniqueResultException.java b/src/org/hibernate/NonUniqueResultException.java new file mode 100644 index 0000000000..dc50cef3c9 --- /dev/null +++ b/src/org/hibernate/NonUniqueResultException.java @@ -0,0 +1,17 @@ +//$Id$ +package org.hibernate; + +/** + * Thrown when the application calls Query.uniqueResult() and + * the query returned more than one result. Unlike all other Hibernate + * exceptions, this one is recoverable! + * + * @author Gavin King + */ +public class NonUniqueResultException extends HibernateException { + + public NonUniqueResultException(int resultCount) { + super( "query did not return a unique result: " + resultCount ); + } + +} diff --git a/src/org/hibernate/ObjectDeletedException.java b/src/org/hibernate/ObjectDeletedException.java new file mode 100644 index 0000000000..28492eeec2 --- /dev/null +++ b/src/org/hibernate/ObjectDeletedException.java @@ -0,0 +1,25 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; + +/** + * Thrown when the user tries to do something illegal with a deleted + * object. 
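Editor's note on NonUniqueObjectException, defined just above: it is easiest to see with two instances competing for one identifier; setId() on Cat is an assumption of the sketch:

```java
Cat loaded = (Cat) session.get( Cat.class, catId ); // now held by the session

Cat detached = new Cat();
detached.setId( catId );    // same identifier, different instance

session.update( detached ); // throws NonUniqueObjectException
```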
+ * + * @author Gavin King + */ +public class ObjectDeletedException extends UnresolvableObjectException { + + public ObjectDeletedException(String message, Serializable identifier, String clazz) { + super(message, identifier, clazz); + } + +} + + + + + + + diff --git a/src/org/hibernate/ObjectNotFoundException.java b/src/org/hibernate/ObjectNotFoundException.java new file mode 100644 index 0000000000..db31c3d51b --- /dev/null +++ b/src/org/hibernate/ObjectNotFoundException.java @@ -0,0 +1,24 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; + +/** + * Thrown when Session.load() fails to select a row with + * the given primary key (identifier value). This exception might not + * be thrown when load() is called, even if there was no + * row on the database, because load() returns a proxy if + * possible. Applications should use Session.get() to test if + * a row exists in the database.
    + *
+ * Like all Hibernate exceptions, this exception is considered + * unrecoverable. + * + * @author Gavin King + */ +public class ObjectNotFoundException extends UnresolvableObjectException { + + public ObjectNotFoundException(Serializable identifier, String clazz) { + super(identifier, clazz); + } +} diff --git a/src/org/hibernate/PersistentObjectException.java b/src/org/hibernate/PersistentObjectException.java new file mode 100644 index 0000000000..2f9a27c1d2 --- /dev/null +++ b/src/org/hibernate/PersistentObjectException.java @@ -0,0 +1,21 @@ +//$Id$ +package org.hibernate; + +/** + * Thrown when the user passes a persistent instance to a Session + * method that expects a transient instance. + * + * @author Gavin King + */ +public class PersistentObjectException extends HibernateException { + + public PersistentObjectException(String s) { + super(s); + } +} + + + + + + diff --git a/src/org/hibernate/PropertyAccessException.java b/src/org/hibernate/PropertyAccessException.java new file mode 100644 index 0000000000..e16d0a723e --- /dev/null +++ b/src/org/hibernate/PropertyAccessException.java @@ -0,0 +1,50 @@ +//$Id$ +package org.hibernate; + +import org.hibernate.util.StringHelper; + +/** + * A problem occurred accessing a property of an instance of a + * persistent class by reflection, or via CGLIB. There are a + * number of possible underlying causes, including failure of a + * security check, an exception thrown from inside the getter or + * setter, a nullable database column mapped to a primitive-type + * property, or a Hibernate type that is not castable to the + * property type (or vice versa). + * + * @author Gavin King + */ +public class PropertyAccessException extends HibernateException { + + private final Class persistentClass; + private final String propertyName; + private final boolean wasSetter; + + public PropertyAccessException(Throwable root, String s, boolean wasSetter, Class persistentClass, String propertyName) { + super(s, root); + this.persistentClass = persistentClass; + this.wasSetter = wasSetter; + this.propertyName = propertyName; + } + + public Class getPersistentClass() { + return persistentClass; + } + + public String getPropertyName() { + return propertyName; + } + + public String getMessage() { + return super.getMessage() + + ( wasSetter ? " setter of " : " getter of ") + + StringHelper.qualify( persistentClass.getName(), propertyName ); + } +} + + + + + + diff --git a/src/org/hibernate/PropertyNotFoundException.java b/src/org/hibernate/PropertyNotFoundException.java new file mode 100644 index 0000000000..26467fb9bf --- /dev/null +++ b/src/org/hibernate/PropertyNotFoundException.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate; + +/** + * Indicates that an expected getter or setter method could not be + * found on a class. + * + * @author Gavin King + */ +public class PropertyNotFoundException extends MappingException { + + public PropertyNotFoundException(String s) { + super(s); + } + +} + + + + + + diff --git a/src/org/hibernate/PropertyValueException.java b/src/org/hibernate/PropertyValueException.java new file mode 100644 index 0000000000..27516a566c --- /dev/null +++ b/src/org/hibernate/PropertyValueException.java @@ -0,0 +1,56 @@ +//$Id$ +package org.hibernate; + +import org.hibernate.util.StringHelper; + +/** + * Thrown when the (illegal) value of a property cannot be persisted.
+ * There are two main causes: a property declared not-null="true" + * is null, or an association references an unsaved transient instance. + * + * @author Gavin King + */ +public class PropertyValueException extends HibernateException { + + private final String entityName; + private final String propertyName; + + public PropertyValueException(String s, String entityName, String propertyName) { + super(s); + this.entityName = entityName; + this.propertyName = propertyName; + } + + public String getEntityName() { + return entityName; + } + + public String getPropertyName() { + return propertyName; + } + + public String getMessage() { + return super.getMessage() + ": " + + StringHelper.qualify(entityName, propertyName); + } + + /** + * Return a well-formed property path. + * Basically, it returns parent.child + * + * @param parent parent in path + * @param child child in path + * @return parent-child path + */ + public static String buildPropertyPath(String parent, String child) { + return new StringBuffer(parent).append('.').append(child).toString(); + } +} + + + + + + diff --git a/src/org/hibernate/Query.java b/src/org/hibernate/Query.java new file mode 100644 index 0000000000..ca3cc5c531 --- /dev/null +++ b/src/org/hibernate/Query.java @@ -0,0 +1,387 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Calendar; +import java.util.Collection; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import org.hibernate.transform.ResultTransformer; +import org.hibernate.type.Type; + +/** + * An object-oriented representation of a Hibernate query. A Query + * instance is obtained by calling Session.createQuery(). This + * interface exposes some extra functionality beyond that provided by + * Session.iterate() and Session.find(): a particular page of the + * result set may be selected by calling setMaxResults() and + * setFirstResult(), named query parameters may be used, and the + * results may be returned as an instance of ScrollableResults. + *
    + * Named query parameters are tokens of the form :name in the + * query string. A value is bound to the integer parameter + * :foo by calling
    + *
    + * setParameter("foo", foo, Hibernate.INTEGER);
    + *
    + * for example. A name may appear multiple times in the query string.
    + *
+ * JDBC-style ? parameters are also supported. To bind a + * value to a JDBC-style parameter, use a set method that accepts an + * int positional argument (numbered from zero, contrary + * to JDBC).
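+ * For example (an illustrative sketch; Cat is an assumed mapped entity
+ * with a string property name):
+ *
+ * Query q = session.createQuery("from Cat as cat where cat.name = ?");
+ * q.setString(0, "Izi"); // positions are numbered from zero, unlike JDBC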
    + *
    + * You may not mix and match JDBC-style parameters and named parameters + * in the same query.
    + *
    + * Queries are executed by calling list(), scroll() or + * iterate(). A query may be re-executed by subsequent invocations. + * Its lifespan is, however, bounded by the lifespan of the Session + * that created it.
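+ * For example, a named parameter might be bound and the query executed
+ * as follows (an illustrative sketch; Cat is an assumed mapped entity
+ * with a string property name):
+ *
+ * List cats = session.createQuery("from Cat as cat where cat.name = :name")
+ *     .setParameter("name", "Fritz", Hibernate.STRING)
+ *     .list();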
    + *
+ * Implementors are not intended to be threadsafe. + * + * @see org.hibernate.Session#createQuery(java.lang.String) + * @see org.hibernate.ScrollableResults + * @author Gavin King + */ +public interface Query { + /** + * Get the query string. + * + * @return the query string + */ + public String getQueryString(); + /** + * Return the Hibernate types of the query result set. + * @return an array of types + */ + public Type[] getReturnTypes() throws HibernateException; + /** + * Return the HQL select clause aliases (if any). + * @return an array of aliases as strings + */ + public String[] getReturnAliases() throws HibernateException; + /** + * Return the names of all named parameters of the query. + * @return the parameter names, in no particular order + */ + public String[] getNamedParameters() throws HibernateException; + /** + * Return the query results as an Iterator. If the query + * contains multiple results per row, the results are returned in + * an instance of Object[].
    + *
    + * Entities returned as results are initialized on demand. The first + * SQL query returns identifiers only.
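+ * For example (illustrative; Cat is an assumed mapped entity):
+ *
+ * Iterator cats = session.createQuery("from Cat").iterate();
+ * while ( cats.hasNext() ) {
+ *     Cat cat = (Cat) cats.next(); // entity state fetched on demand
+ * }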
    + * + * @return the result iterator + * @throws HibernateException + */ + public Iterator iterate() throws HibernateException; + /** + * Return the query results as ScrollableResults. The + * scrollability of the returned results depends upon JDBC driver + * support for scrollable ResultSets.
    + * + * @see ScrollableResults + * @return the result iterator + * @throws HibernateException + */ + public ScrollableResults scroll() throws HibernateException; + /** + * Return the query results as ScrollableResults. The + * scrollability of the returned results depends upon JDBC driver + * support for scrollable ResultSets.
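+ * For example (illustrative; Cat is an assumed mapped entity):
+ *
+ * ScrollableResults cats = query.scroll(ScrollMode.SCROLL_INSENSITIVE);
+ * while ( cats.next() ) {
+ *     Cat cat = (Cat) cats.get(0);
+ * }
+ * cats.close();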
+ * + * @see ScrollableResults + * @see ScrollMode + * @return the result iterator + * @throws HibernateException + */ + public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException; + /** + * Return the query results as a List. If the query contains + * multiple results per row, the results are returned in an instance + * of Object[]. + * + * @return the result list + * @throws HibernateException + */ + public List list() throws HibernateException; + /** + * Convenience method to return a single instance that matches + * the query, or null if the query returns no results. + * + * @return the single result or null + * @throws NonUniqueResultException if there is more than one matching result + */ + public Object uniqueResult() throws HibernateException; + + /** + * Execute the update or delete statement. + *

+ * The semantics are compliant with the EJB3 Query.executeUpdate() + * method. + * + * @return The number of entities updated or deleted. + * @throws HibernateException + */ + public int executeUpdate() throws HibernateException; + + /** + * Set the maximum number of rows to retrieve. If not set, + * there is no limit to the number of rows retrieved. + * @param maxResults the maximum number of rows + */ + public Query setMaxResults(int maxResults); + /** + * Set the first row to retrieve. If not set, rows will be + * retrieved beginning from row 0. + * @param firstResult a row number, numbered from 0 + */ + public Query setFirstResult(int firstResult); + + /** + * Entities retrieved by this query will be loaded in + * a read-only mode where Hibernate will never dirty-check + * them or make changes persistent. + * + */ + public Query setReadOnly(boolean readOnly); + + /** + * Enable caching of this query result set. + * @param cacheable Should the query results be cacheable? + */ + public Query setCacheable(boolean cacheable); + + /** + * Set the name of the cache region. + * @param cacheRegion the name of a query cache region, or null + * for the default query cache + */ + public Query setCacheRegion(String cacheRegion); + + /** + * Set a timeout for the underlying JDBC query. + * @param timeout the timeout in seconds + */ + public Query setTimeout(int timeout); + /** + * Set a fetch size for the underlying JDBC query. + * @param fetchSize the fetch size + */ + public Query setFetchSize(int fetchSize); + + /** + * Set the lock mode for the objects identified by the + * given alias that appears in the FROM clause. + * @param alias a query alias, or this for a collection filter + */ + public Query setLockMode(String alias, LockMode lockMode); + + /** + * Add a comment to the generated SQL. + * @param comment a human-readable string + */ + public Query setComment(String comment); + + /** + * Override the current session flush mode, just for + * this query. + * @see org.hibernate.FlushMode + */ + public Query setFlushMode(FlushMode flushMode); + + /** + * Override the current session cache mode, just for + * this query. + * @see org.hibernate.CacheMode + */ + public Query setCacheMode(CacheMode cacheMode); + + /** + * Bind a value to a JDBC-style query parameter. + * @param position the position of the parameter in the query + * string, numbered from 0. + * @param val the possibly-null parameter value + * @param type the Hibernate type + */ + public Query setParameter(int position, Object val, Type type); + /** + * Bind a value to a named query parameter. + * @param name the name of the parameter + * @param val the possibly-null parameter value + * @param type the Hibernate type + */ + public Query setParameter(String name, Object val, Type type); + + /** + * Bind a value to a JDBC-style query parameter. The Hibernate type of the parameter is + * first detected from its usage/position in the query and, if that is not sufficient, + * guessed from the class of the given object. + * @param position the position of the parameter in the query + * string, numbered from 0. + * @param val the non-null parameter value + * @throws org.hibernate.HibernateException if no type could be determined + */ + public Query setParameter(int position, Object val) throws HibernateException; + /** + * Bind a value to a named query parameter. The Hibernate type of the parameter is + * first detected from its usage/position in the query and, if that is not sufficient, + * guessed from the class of the given object.
+ * @param name the name of the parameter + * @param val the non-null parameter value + * @throws org.hibernate.HibernateException if no type could be determined + */ + public Query setParameter(String name, Object val) throws HibernateException; + + /** + * Bind values and types to positional parameters. + */ + public Query setParameters(Object[] values, Type[] types) throws HibernateException; + + /** + * Bind multiple values to a named query parameter. This is useful for binding + * a list of values to an expression such as foo.bar in (:value_list). + * @param name the name of the parameter + * @param vals a collection of values to list + * @param type the Hibernate type of the values + */ + public Query setParameterList(String name, Collection vals, Type type) throws HibernateException; + + /** + * Bind multiple values to a named query parameter. The Hibernate type of the parameter is + * first detected from its usage/position in the query and, if that is not sufficient, + * guessed from the class of the first object in the collection. This is useful for binding a list of values + * to an expression such as foo.bar in (:value_list). + * @param name the name of the parameter + * @param vals a collection of values to list + */ + public Query setParameterList(String name, Collection vals) throws HibernateException; + + /** + * Bind multiple values to a named query parameter. This is useful for binding + * a list of values to an expression such as foo.bar in (:value_list). + * @param name the name of the parameter + * @param vals a collection of values to list + * @param type the Hibernate type of the values + */ + public Query setParameterList(String name, Object[] vals, Type type) throws HibernateException; + + /** + * Bind multiple values to a named query parameter. The Hibernate type of the parameter is + * first detected from its usage/position in the query and, if that is not sufficient, + * guessed from the class of the first object in the array. This is useful for binding a list of values + * to an expression such as foo.bar in (:value_list). + * @param name the name of the parameter + * @param vals a collection of values to list + */ + public Query setParameterList(String name, Object[] vals) throws HibernateException; + + /** + * Bind the property values of the given bean to named parameters of the query, + * matching property names with parameter names and mapping property types to + * Hibernate types using heuristics. + * @param bean any JavaBean or POJO + */ + public Query setProperties(Object bean) throws HibernateException; + + /** + * Bind the values of the given Map to each named parameter of the query, + * matching key names with parameter names and mapping value types to + * Hibernate types using heuristics.
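+ * For example (illustrative):
+ *
+ * Map params = new HashMap();
+ * params.put("name", "Fritz");
+ * query.setProperties(params);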
+ * @param bean a java.util.Map + */ + public Query setProperties(Map bean) throws HibernateException; + + + public Query setString(int position, String val); + public Query setCharacter(int position, char val); + public Query setBoolean(int position, boolean val); + public Query setByte(int position, byte val); + public Query setShort(int position, short val); + public Query setInteger(int position, int val); + public Query setLong(int position, long val); + public Query setFloat(int position, float val); + public Query setDouble(int position, double val); + public Query setBinary(int position, byte[] val); + public Query setText(int position, String val); + public Query setSerializable(int position, Serializable val); + public Query setLocale(int position, Locale locale); + public Query setBigDecimal(int position, BigDecimal number); + public Query setBigInteger(int position, BigInteger number); + + public Query setDate(int position, Date date); + public Query setTime(int position, Date date); + public Query setTimestamp(int position, Date date); + + public Query setCalendar(int position, Calendar calendar); + public Query setCalendarDate(int position, Calendar calendar); + + public Query setString(String name, String val); + public Query setCharacter(String name, char val); + public Query setBoolean(String name, boolean val); + public Query setByte(String name, byte val); + public Query setShort(String name, short val); + public Query setInteger(String name, int val); + public Query setLong(String name, long val); + public Query setFloat(String name, float val); + public Query setDouble(String name, double val); + public Query setBinary(String name, byte[] val); + public Query setText(String name, String val); + public Query setSerializable(String name, Serializable val); + public Query setLocale(String name, Locale locale); + public Query setBigDecimal(String name, BigDecimal number); + public Query setBigInteger(String name, BigInteger number); + + public Query setDate(String name, Date date); + public Query setTime(String name, Date date); + public Query setTimestamp(String name, Date date); + + public Query setCalendar(String name, Calendar calendar); + public Query setCalendarDate(String name, Calendar calendar); + + /** + * Bind an instance of a mapped persistent class to a JDBC-style query parameter. + * @param position the position of the parameter in the query + * string, numbered from 0. + * @param val a non-null instance of a persistent class + */ + public Query setEntity(int position, Object val); // use setParameter for null values + + /** + * Bind an instance of a mapped persistent class to a named query parameter. + * @param name the name of the parameter + * @param val a non-null instance of a persistent class + */ + public Query setEntity(String name, Object val); // use setParameter for null values + + + /** + * Set a strategy for handling the query results. This can be used to change + * "shape" of the query result. + * + * @param transformer The transformer to apply + * @return this (for method chaining) + */ + public Query setResultTransformer(ResultTransformer transformer); + +} + + + + + + + diff --git a/src/org/hibernate/QueryException.java b/src/org/hibernate/QueryException.java new file mode 100644 index 0000000000..4faf787441 --- /dev/null +++ b/src/org/hibernate/QueryException.java @@ -0,0 +1,48 @@ +//$Id$ +package org.hibernate; + +/** + * A problem occurred translating a Hibernate query to SQL + * due to invalid query syntax, etc. 
+ */ +public class QueryException extends HibernateException { + + private String queryString; + + public QueryException(String message) { + super(message); + } + public QueryException(String message, Throwable e) { + super(message, e); + } + + public QueryException(String message, String queryString) { + super(message); + this.queryString = queryString; + } + + public QueryException(Exception e) { + super(e); + } + public String getQueryString() { + return queryString; + } + + public void setQueryString(String queryString) { + this.queryString = queryString; + } + + public String getMessage() { + String msg = super.getMessage(); + if ( queryString!=null ) msg += " [" + queryString + ']'; + return msg; + } + +} + + + + + + + diff --git a/src/org/hibernate/QueryParameterException.java b/src/org/hibernate/QueryParameterException.java new file mode 100644 index 0000000000..9479e55e35 --- /dev/null +++ b/src/org/hibernate/QueryParameterException.java @@ -0,0 +1,26 @@ +//$Id: $ +package org.hibernate; + +/** + * Parameter invalid or not found in the query + * + * @author Emmanuel Bernard + */ +public class QueryParameterException extends QueryException { + + public QueryParameterException(Exception e) { + super( e ); + } + + public QueryParameterException(String message) { + super( message ); + } + + public QueryParameterException(String message, Throwable e) { + super( message, e ); + } + + public QueryParameterException(String message, String queryString) { + super( message, queryString ); + } +} diff --git a/src/org/hibernate/ReplicationMode.java b/src/org/hibernate/ReplicationMode.java new file mode 100644 index 0000000000..af585445fc --- /dev/null +++ b/src/org/hibernate/ReplicationMode.java @@ -0,0 +1,78 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; + +import org.hibernate.type.VersionType; + +/** + * Represents a replication strategy. + * + * @see Session#replicate(Object, ReplicationMode) + * @author Gavin King + */ +public abstract class ReplicationMode implements Serializable { + private final String name; + private static final Map INSTANCES = new HashMap(); + + public ReplicationMode(String name) { + this.name=name; + } + public String toString() { + return name; + } + public abstract boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType); + /** + * Throw an exception when a row already exists. + */ + public static final ReplicationMode EXCEPTION = new ReplicationMode("EXCEPTION") { + public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) { + throw new AssertionFailure("should not be called"); + } + }; + /** + * Ignore replicated entities when a row already exists. + */ + public static final ReplicationMode IGNORE = new ReplicationMode("IGNORE") { + public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) { + return false; + } + }; + /** + * Overwrite existing rows when a row already exists. + */ + public static final ReplicationMode OVERWRITE = new ReplicationMode("OVERWRITE") { + public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) { + return true; + } + }; + /** + * When a row already exists, choose the latest version. 
+ */ + public static final ReplicationMode LATEST_VERSION = new ReplicationMode("LATEST_VERSION") { + public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) { + if (versionType==null) return true; //always overwrite nonversioned data + return versionType.getComparator().compare(currentVersion, newVersion) <= 0; + } + }; + + static { + INSTANCES.put( LATEST_VERSION.name, LATEST_VERSION ); + INSTANCES.put( IGNORE.name, IGNORE ); + INSTANCES.put( OVERWRITE.name, OVERWRITE ); + INSTANCES.put( EXCEPTION.name, EXCEPTION ); + } + + private Object readResolve() { + return INSTANCES.get(name); + } + +} + + + + + + diff --git a/src/org/hibernate/SQLQuery.java b/src/org/hibernate/SQLQuery.java new file mode 100755 index 0000000000..09330567c2 --- /dev/null +++ b/src/org/hibernate/SQLQuery.java @@ -0,0 +1,93 @@ +//$Id$ +package org.hibernate; + +import org.hibernate.type.Type; + +/** + * Allows the user to declare the types and select list injection + * points of all entities returned by the query. Also allows + * declaration of the type and column alias of any scalar results + * of the query. + * + * @author Gavin King + */ +public interface SQLQuery extends Query { + /** + * Declare a "root" entity, without specifying an alias + */ + public SQLQuery addEntity(String entityName); + /** + * Declare a "root" entity + */ + public SQLQuery addEntity(String alias, String entityName); + /** + * Declare a "root" entity, specifying a lock mode + */ + public SQLQuery addEntity(String alias, String entityName, LockMode lockMode); + /** + * Declare a "root" entity, without specifying an alias + */ + public SQLQuery addEntity(Class entityClass); + /** + * Declare a "root" entity + */ + public SQLQuery addEntity(String alias, Class entityClass); + /** + * Declare a "root" entity, specifying a lock mode + */ + public SQLQuery addEntity(String alias, Class entityClass, LockMode lockMode); + + /** + * Declare a "joined" entity + */ + public SQLQuery addJoin(String alias, String path); + /** + * Declare a "joined" entity, specifying a lock mode + */ + public SQLQuery addJoin(String alias, String path, LockMode lockMode); + + /** + * Declare a scalar query result + */ + public SQLQuery addScalar(String columnAlias, Type type); + + /** + * Declare a scalar query result. Hibernate will attempt to automatically detect the underlying type. + */ + public SQLQuery addScalar(String columnAlias); + + /** + * Use a predefined named ResultSetMapping + */ + public SQLQuery setResultSetMapping(String name); + + /** + * Adds a query space for auto-flush synchronization. + * + * @param querySpace The query space to be auto-flushed for this query. + * @return this, for method chaining + */ + public SQLQuery addSynchronizedQuerySpace(String querySpace); + + /** + * Adds an entity name for auto-flush synchronization. + * + * @param entityName The name of the entity upon whose defined + * query spaces we should additionally synchronize. + * @return this, for method chaining + * @throws MappingException Indicates the given entity name could not be + * resolved. + */ + public SQLQuery addSynchronizedEntityName(String entityName) throws MappingException; + + /** + * Adds an entity class for auto-flush synchronization. + * + * @param entityClass The class of the entity upon whose defined + * query spaces we should additionally synchronize. + * @return this, for method chaining + * @throws MappingException Indicates the given entity class could not be + * resolved.
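+ * For example (illustrative; Cat is an assumed mapped entity):
+ *
+ * SQLQuery q = session.createSQLQuery("select * from cats");
+ * q.addEntity(Cat.class);
+ * q.addSynchronizedEntityClass(Cat.class); // pending Cat changes are flushed first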
+ */ + public SQLQuery addSynchronizedEntityClass(Class entityClass) throws MappingException; +} diff --git a/src/org/hibernate/ScrollMode.java b/src/org/hibernate/ScrollMode.java new file mode 100755 index 0000000000..da6b24656b --- /dev/null +++ b/src/org/hibernate/ScrollMode.java @@ -0,0 +1,74 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.util.HashMap; +import java.util.Map; + +/** + * Specifies the type of JDBC scrollable result set to use + * underneath a ScrollableResults. + * + * @see Query#scroll(ScrollMode) + * @see ScrollableResults + * @author Gavin King + */ +public final class ScrollMode implements Serializable { + private final int resultSetType; + private final String name; + private static final Map INSTANCES = new HashMap(); + + private ScrollMode(int level, String name) { + this.resultSetType=level; + this.name=name; + } + + public String toString() { + return name; + } + + /** + * @return the JDBC result set type code + */ + public int toResultSetType() { + return resultSetType; + } + + /** + * @see java.sql.ResultSet.TYPE_FORWARD_ONLY + */ + public static final ScrollMode FORWARD_ONLY = new ScrollMode(ResultSet.TYPE_FORWARD_ONLY, "FORWARD_ONLY"); + /** + * @see java.sql.ResultSet.TYPE_SCROLL_SENSITIVE + */ + public static final ScrollMode SCROLL_SENSITIVE = new ScrollMode(ResultSet.TYPE_SCROLL_SENSITIVE, "SCROLL_SENSITIVE"); + /** + * Note that since the Hibernate session acts as a cache, you + * might need to explicitly evict objects, if you need to see + * changes made by other transactions. + * @see java.sql.ResultSet.TYPE_SCROLL_INSENSITIVE + */ + public static final ScrollMode SCROLL_INSENSITIVE = new ScrollMode(ResultSet.TYPE_SCROLL_INSENSITIVE, "SCROLL_INSENSITIVE"); + + public boolean lessThan(ScrollMode other) { + return this.resultSetType < other.resultSetType; + } + + static { + INSTANCES.put( FORWARD_ONLY.name, FORWARD_ONLY ); + INSTANCES.put( SCROLL_INSENSITIVE.name, SCROLL_INSENSITIVE ); + INSTANCES.put( SCROLL_SENSITIVE.name, SCROLL_SENSITIVE ); + } + + private Object readResolve() { + return INSTANCES.get( name ); + } +} diff --git a/src/org/hibernate/ScrollableResults.java b/src/org/hibernate/ScrollableResults.java new file mode 100644 --- /dev/null +++ b/src/org/hibernate/ScrollableResults.java +//$Id$ +package org.hibernate; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.sql.Blob; +import java.sql.Clob; +import java.util.Calendar; +import java.util.Date; +import java.util.Locale; +import java.util.TimeZone; + +import org.hibernate.type.Type; + +/** + * A result iterator that allows moving around within the results + * by arbitrary increments. The Query / ScrollableResults + * pattern is very similar to the JDBC PreparedStatement/ + * ResultSet pattern and the semantics of methods of this interface + * are similar to the similarly named methods on ResultSet.
    + *
    + * Contrary to JDBC, columns of results are numbered from zero. + * + * @see Query#scroll() + * @author Gavin King + */ +public interface ScrollableResults { + /** + * Advance to the next result + * @return true if there is another result + */ + public boolean next() throws HibernateException; + /** + * Retreat to the previous result + * @return true if there is a previous result + */ + public boolean previous() throws HibernateException; + /** + * Scroll an arbitrary number of locations + * @param i a positive (forward) or negative (backward) number of rows + * @return true if there is a result at the new location + */ + public boolean scroll(int i) throws HibernateException; + /** + * Go to the last result + * @return true if there are any results + */ + public boolean last() throws HibernateException; + /** + * Go to the first result + * @return true if there are any results + */ + public boolean first() throws HibernateException; + /** + * Go to a location just before first result (this is the initial location) + */ + public void beforeFirst() throws HibernateException; + /** + * Go to a location just after the last result + */ + public void afterLast() throws HibernateException; + /** + * Is this the first result? + * + * @return true if this is the first row of results + * @throws HibernateException + */ + public boolean isFirst() throws HibernateException; + /** + * Is this the last result? + * + * @return true if this is the last row of results + * @throws HibernateException + */ + public boolean isLast() throws HibernateException; + /** + * Release resources immediately. + */ + public void close() throws HibernateException; + /** + * Get the current row of results + * @return an object or array + */ + public Object[] get() throws HibernateException; + /** + * Get the ith object in the current row of results, without + * initializing any other results in the row. This method may be used + * safely, regardless of the type of the column (ie. even for scalar + * results). 
+ * @param i the column, numbered from zero + * @return an object of any Hibernate type or null + */ + public Object get(int i) throws HibernateException; + + /** + * Get the type of the ith column of results + * @param i the column, numbered from zero + * @return the Hibernate type + */ + public Type getType(int i); + + /** + * Convenience method to read an integer + */ + public Integer getInteger(int col) throws HibernateException; + /** + * Convenience method to read a long + */ + public Long getLong(int col) throws HibernateException; + /** + * Convenience method to read a float + */ + public Float getFloat(int col) throws HibernateException; + /** + * Convenience method to read a boolean + */ + public Boolean getBoolean(int col) throws HibernateException; + /** + * Convenience method to read a double + */ + public Double getDouble(int col) throws HibernateException; + /** + * Convenience method to read a short + */ + public Short getShort(int col) throws HibernateException; + /** + * Convenience method to read a byte + */ + public Byte getByte(int col) throws HibernateException; + /** + * Convenience method to read a character + */ + public Character getCharacter(int col) throws HibernateException; + /** + * Convenience method to read a binary + */ + public byte[] getBinary(int col) throws HibernateException; + /** + * Convenience method to read text + */ + public String getText(int col) throws HibernateException; + /** + * Convenience method to read a blob + */ + public Blob getBlob(int col) throws HibernateException; + /** + * Convenience method to read a clob + */ + public Clob getClob(int col) throws HibernateException; + /** + * Convenience method to read a string + */ + public String getString(int col) throws HibernateException; + /** + * Convenience method to read a big_decimal + */ + public BigDecimal getBigDecimal(int col) throws HibernateException; + /** + * Convenience method to read a big_integer + */ + public BigInteger getBigInteger(int col) throws HibernateException; + /** + * Convenience method to read a date, time or timestamp + */ + public Date getDate(int col) throws HibernateException; + /** + * Convenience method to read a locale + */ + public Locale getLocale(int col) throws HibernateException; + /** + * Convenience method to read a calendar or calendar_date + */ + public Calendar getCalendar(int col) throws HibernateException; + /** + * Convenience method to read a currency + */ + //public Currency getCurrency(int col) throws HibernateException; + /** + * Convenience method to read a timezone + */ + public TimeZone getTimeZone(int col) throws HibernateException; + /** + * Get the current location in the result set. The first + * row is number 0, contrary to JDBC. + * @return the row number, numbered from 0, or -1 if + * there is no current row + */ + public int getRowNumber() throws HibernateException; + /** + * Set the current location in the result set, numbered from either the + * first row (row number 0), or the last row (row + * number -1). 
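+ * For example (illustrative):
+ *
+ * results.setRowNumber(0);  // first row
+ * results.setRowNumber(-1); // last row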
+ * @param rowNumber the row number, numbered from the last row, in the + * case of a negative row number + * @return true if there is a row at that row number + */ + public boolean setRowNumber(int rowNumber) throws HibernateException; +} + + + + + + diff --git a/src/org/hibernate/Session.java b/src/org/hibernate/Session.java new file mode 100644 index 0000000000..1687e73786 --- /dev/null +++ b/src/org/hibernate/Session.java @@ -0,0 +1,779 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.sql.Connection; + +import org.hibernate.stat.SessionStatistics; + +/** + * The main runtime interface between a Java application and Hibernate. This is the + * central API class abstracting the notion of a persistence service.
    + *
    + * The lifecycle of a Session is bounded by the beginning and end of a logical + * transaction. (Long transactions might span several database transactions.)
    + *
    + * The main function of the Session is to offer create, read and delete operations + * for instances of mapped entity classes. Instances may exist in one of three states:
    + *
    + * transient: never persistent, not associated with any Session
    + * persistent: associated with a unique Session
    + * detached: previously persistent, not associated with any Session
    + *
    + * Transient instances may be made persistent by calling save(), + * persist() or saveOrUpdate(). Persistent instances may be made transient + * by calling delete(). Any instance returned by a get() or + * load() method is persistent. Detached instances may be made persistent + * by calling update(), saveOrUpdate(), lock() or replicate(). + * The state of a transient or detached instance may also be made persistent as a new + * persistent instance by calling merge().
    + *
    + * save() and persist() result in an SQL INSERT, delete() + * in an SQL DELETE and update() or merge() in an SQL UPDATE. + * Changes to persistent instances are detected at flush time and also result in an SQL + * UPDATE. saveOrUpdate() and replicate() result in either an + * INSERT or an UPDATE.
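+ * For example (an illustrative sketch; Cat is an assumed mapped entity):
+ *
+ * Cat fritz = new Cat();               // transient
+ * Serializable id = sess.save(fritz);  // persistent; SQL INSERT at flush
+ * sess.delete(fritz);                  // removed; SQL DELETE at flush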
    + *
    + * It is not intended that implementors be threadsafe. Instead each thread/transaction + * should obtain its own instance from a SessionFactory.
    + *
    + * A Session instance is serializable if its persistent classes are serializable.
    + *
    + * A typical transaction should use the following idiom: + *
    + * Session sess = factory.openSession();
    + * Transaction tx;
    + * try {
    + *     tx = sess.beginTransaction();
    + *     //do some work
    + *     ...
    + *     tx.commit();
    + * }
    + * catch (Exception e) {
    + *     if (tx!=null) tx.rollback();
    + *     throw e;
    + * }
    + * finally {
    + *     sess.close();
    + * }
    + * 
    + *
+ * If the Session throws an exception, the transaction must be rolled back + * and the session discarded. The internal state of the Session might not + * be consistent with the database after the exception occurs. + * + * @see SessionFactory + * @author Gavin King + */ +public interface Session extends Serializable { + + /** + * Retrieve the entity mode in effect for this session. + * + * @return The entity mode for this session. + */ + public EntityMode getEntityMode(); + + /** + * Starts a new Session with the given entity mode in effect. This secondary + * Session inherits the connection, transaction, and other context + * information from the primary Session. It doesn't need to be flushed + * or closed by the developer. + * + * @param entityMode The entity mode to use for the new session. + * @return The new session + */ + public Session getSession(EntityMode entityMode); + + /** + * Force this session to flush. Must be called at the end of a + * unit of work, before committing the transaction and closing the + * session (depending on {@link #setFlushMode flush-mode}, + * {@link Transaction#commit()} calls this method). + *

    + * Flushing is the process of synchronizing the underlying persistent + * store with persistable state held in memory. + * + * @throws HibernateException Indicates problems flushing the session or + * talking to the database. + */ + public void flush() throws HibernateException; + + /** + * Set the flush mode for this session. + *

    + * The flush mode determines the points at which the session is flushed. + * Flushing is the process of synchronizing the underlying persistent + * store with persistable state held in memory. + *

    + * For a logically "read only" session, it is reasonable to set the session's + * flush mode to {@link FlushMode#MANUAL} at the start of the session (in + * order to achieve some extra performance). + * + * @param flushMode the new flush mode + * @see FlushMode + */ + public void setFlushMode(FlushMode flushMode); + + /** + * Get the current flush mode for this session. + * + * @return The flush mode + */ + public FlushMode getFlushMode(); + + /** + * Set the cache mode. + *

    + * Cache mode determines the manner in which this session can interact with + * the second level cache. + * + * @param cacheMode The new cache mode. + */ + public void setCacheMode(CacheMode cacheMode); + + /** + * Get the current cache mode. + * + * @return The current cache mode. + */ + public CacheMode getCacheMode(); + + /** + * Get the session factory which created this session. + * + * @return The session factory. + * @see SessionFactory + + */ + public SessionFactory getSessionFactory(); + + /** + * Get the JDBC connection of this Session.
    + *
+ * If the session is using aggressive connection release (as in a + * CMT environment), it is the application's responsibility to + * close the connection returned by this call. Otherwise, the + * application should not close the connection. + * + * @return the JDBC connection in use by the Session + * @throws HibernateException if the Session is disconnected + * @deprecated To be replaced with an SPI for performing work against the connection; scheduled for removal in 4.x + */ + public Connection connection() throws HibernateException; + + /** + * End the session by releasing the JDBC connection and cleaning up. It is + * not strictly necessary to close the session but you must at least + * {@link #disconnect()} it. + * + * @return the connection provided by the application or null. + * @throws HibernateException Indicates problems cleaning up. + */ + public Connection close() throws HibernateException; + + /** + * Cancel the execution of the current query. + *

+ * This is the sole method on session which may be safely called from + * another thread. + * + * @throws HibernateException There was a problem canceling the query + */ + public void cancelQuery() throws HibernateException; + + /** + * Check if the session is still open. + * + * @return boolean + */ + public boolean isOpen(); + + /** + * Check if the session is currently connected. + * + * @return boolean + */ + public boolean isConnected(); + + /** + * Does this session contain any changes which must be synchronized with + * the database? In other words, would any DML operations be executed if + * we flushed this session? + * + * @return True if the session contains pending changes; false otherwise. + * @throws HibernateException could not perform dirty checking + */ + public boolean isDirty() throws HibernateException; + + /** + * Return the identifier value of the given entity as associated with this + * session. An exception is thrown if the given entity instance is transient + * or detached in relation to this session. + * + * @param object a persistent instance + * @return the identifier + * @throws TransientObjectException if the instance is transient or associated with + * a different session + */ + public Serializable getIdentifier(Object object) throws HibernateException; + + /** + * Check if this instance is associated with this Session. + * + * @param object an instance of a persistent class + * @return true if the given instance is associated with this Session + */ + public boolean contains(Object object); + + /** + * Remove this instance from the session cache. Changes to the instance will + * not be synchronized with the database. This operation cascades to associated + * instances if the association is mapped with cascade="evict". + * + * @param object a persistent instance + * @throws HibernateException + */ + public void evict(Object object) throws HibernateException; + + /** + * Return the persistent instance of the given entity class with the given identifier, + * obtaining the specified lock mode, assuming the instance exists. + * + * @param theClass a persistent class + * @param id a valid identifier of an existing persistent instance of the class + * @param lockMode the lock level + * @return the persistent instance or proxy + * @throws HibernateException + */ + public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException; + + /** + * Return the persistent instance of the given entity class with the given identifier, + * obtaining the specified lock mode, assuming the instance exists. + * + * @param entityName a persistent class + * @param id a valid identifier of an existing persistent instance of the class + * @param lockMode the lock level + * @return the persistent instance or proxy + * @throws HibernateException + */ + public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException; + + /** + * Return the persistent instance of the given entity class with the given identifier, + * assuming that the instance exists. + *

    + * You should not use this method to determine if an instance exists (use get() + * instead). Use this only to retrieve an instance that you assume exists, where non-existence + * would be an actual error. + * + * @param theClass a persistent class + * @param id a valid identifier of an existing persistent instance of the class + * @return the persistent instance or proxy + * @throws HibernateException + */ + public Object load(Class theClass, Serializable id) throws HibernateException; + + /** + * Return the persistent instance of the given entity class with the given identifier, + * assuming that the instance exists. + *
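+ * For example (illustrative; Cat is an assumed mapped entity and catId
+ * an assumed identifier value):
+ *
+ * Cat cat = (Cat) sess.load(Cat.class, catId); // assumes the row exists
+ * Cat maybe = (Cat) sess.get(Cat.class, catId); // null if no such row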

    + * You should not use this method to determine if an instance exists (use get() + * instead). Use this only to retrieve an instance that you assume exists, where non-existence + * would be an actual error. + * + * @param entityName a persistent class + * @param id a valid identifier of an existing persistent instance of the class + * @return the persistent instance or proxy + * @throws HibernateException + */ + public Object load(String entityName, Serializable id) throws HibernateException; + + /** + * Read the persistent state associated with the given identifier into the given transient + * instance. + * + * @param object an "empty" instance of the persistent class + * @param id a valid identifier of an existing persistent instance of the class + * @throws HibernateException + */ + public void load(Object object, Serializable id) throws HibernateException; + + /** + * Persist the state of the given detached instance, reusing the current + * identifier value. This operation cascades to associated instances if + * the association is mapped with cascade="replicate". + * + * @param object a detached instance of a persistent class + */ + public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException; + + /** + * Persist the state of the given detached instance, reusing the current + * identifier value. This operation cascades to associated instances if + * the association is mapped with cascade="replicate". + * + * @param object a detached instance of a persistent class + */ + public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException; + + /** + * Persist the given transient instance, first assigning a generated identifier. (Or + * using the current value of the identifier property if the assigned + * generator is used.) This operation cascades to associated instances if the + * association is mapped with cascade="save-update". + * + * @param object a transient instance of a persistent class + * @return the generated identifier + * @throws HibernateException + */ + public Serializable save(Object object) throws HibernateException; + + /** + * Persist the given transient instance, first assigning a generated identifier. (Or + * using the current value of the identifier property if the assigned + * generator is used.) This operation cascades to associated instances if the + * association is mapped with cascade="save-update". + * + * @param object a transient instance of a persistent class + * @return the generated identifier + * @throws HibernateException + */ + public Serializable save(String entityName, Object object) throws HibernateException; + + /** + * Either {@link #save(Object)} or {@link #update(Object)} the given + * instance, depending upon resolution of the unsaved-value checks (see the + * manual for discussion of unsaved-value checking). + *
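+ * For example (illustrative):
+ *
+ * sess.saveOrUpdate(cat); // INSERT if cat is transient, UPDATE if detached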

    + * This operation cascades to associated instances if the association is mapped + * with cascade="save-update". + * + * @see Session#save(Object) + * @see Session#update(Object) + * @param object a transient or detached instance containing new or updated state + * @throws HibernateException + */ + public void saveOrUpdate(Object object) throws HibernateException; + + /** + * Either {@link #save(String, Object)} or {@link #update(String, Object)} + * the given instance, depending upon resolution of the unsaved-value checks + * (see the manual for discussion of unsaved-value checking). + *

    + * This operation cascades to associated instances if the association is mapped + * with cascade="save-update". + * + * @see Session#save(String,Object) + * @see Session#update(String,Object) + * @param entityName The name of the entity + * @param object a transient or detached instance containing new or updated state + * @throws HibernateException + */ + public void saveOrUpdate(String entityName, Object object) throws HibernateException; + + /** + * Update the persistent instance with the identifier of the given detached + * instance. If there is a persistent instance with the same identifier, + * an exception is thrown. This operation cascades to associated instances + * if the association is mapped with cascade="save-update". + * + * @param object a detached instance containing updated state + * @throws HibernateException + */ + public void update(Object object) throws HibernateException; + + /** + * Update the persistent instance with the identifier of the given detached + * instance. If there is a persistent instance with the same identifier, + * an exception is thrown. This operation cascades to associated instances + * if the association is mapped with cascade="save-update". + * + * @param object a detached instance containing updated state + * @throws HibernateException + */ + public void update(String entityName, Object object) throws HibernateException; + + /** + * Copy the state of the given object onto the persistent object with the same + * identifier. If there is no persistent instance currently associated with + * the session, it will be loaded. Return the persistent instance. If the + * given instance is unsaved, save a copy of and return it as a newly persistent + * instance. The given instance does not become associated with the session. + * This operation cascades to associated instances if the association is mapped + * with cascade="merge".
    + *
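+ * For example (illustrative; detachedCat is an assumed detached instance):
+ *
+ * Cat managed = (Cat) sess.merge(detachedCat);
+ * // detachedCat itself remains detached; managed is the session's copy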
    + * The semantics of this method are defined by JSR-220. + * + * @param object a detached instance with state to be copied + * @return an updated persistent instance + */ + public Object merge(Object object) throws HibernateException; + + /** + * Copy the state of the given object onto the persistent object with the same + * identifier. If there is no persistent instance currently associated with + * the session, it will be loaded. Return the persistent instance. If the + * given instance is unsaved, save a copy of and return it as a newly persistent + * instance. The given instance does not become associated with the session. + * This operation cascades to associated instances if the association is mapped + * with cascade="merge".
    + *
    + * The semantics of this method are defined by JSR-220. + * + * @param object a detached instance with state to be copied + * @return an updated persistent instance + */ + public Object merge(String entityName, Object object) throws HibernateException; + + /** + * Make a transient instance persistent. This operation cascades to associated + * instances if the association is mapped with cascade="persist".
    + *
    + * The semantics of this method are defined by JSR-220. + * + * @param object a transient instance to be made persistent + */ + public void persist(Object object) throws HibernateException; + /** + * Make a transient instance persistent. This operation cascades to associated + * instances if the association is mapped with cascade="persist".
    + *
    + * The semantics of this method are defined by JSR-220. + * + * @param object a transient instance to be made persistent + */ + public void persist(String entityName, Object object) throws HibernateException; + + /** + * Remove a persistent instance from the datastore. The argument may be + * an instance associated with the receiving Session or a transient + * instance with an identifier associated with existing persistent state. + * This operation cascades to associated instances if the association is mapped + * with cascade="delete". + * + * @param object the instance to be removed + * @throws HibernateException + */ + public void delete(Object object) throws HibernateException; + + /** + * Remove a persistent instance from the datastore. The object argument may be + * an instance associated with the receiving Session or a transient + * instance with an identifier associated with existing persistent state. + * This operation cascades to associated instances if the association is mapped + * with cascade="delete". + * + * @param entityName The entity name for the instance to be removed. + * @param object the instance to be removed + * @throws HibernateException + */ + public void delete(String entityName, Object object) throws HibernateException; + + /** + * Obtain the specified lock level upon the given object. This may be used to + * perform a version check (LockMode.READ), to upgrade to a pessimistic + * lock (LockMode.UPGRADE), or to simply reassociate a transient instance + * with a session (LockMode.NONE). This operation cascades to associated + * instances if the association is mapped with cascade="lock". + * + * @param object a persistent or transient instance + * @param lockMode the lock level + * @throws HibernateException + */ + public void lock(Object object, LockMode lockMode) throws HibernateException; + + /** + * Obtain the specified lock level upon the given object. This may be used to + * perform a version check (LockMode.READ), to upgrade to a pessimistic + * lock (LockMode.UPGRADE), or to simply reassociate a transient instance + * with a session (LockMode.NONE). This operation cascades to associated + * instances if the association is mapped with cascade="lock". + * + * @param object a persistent or transient instance + * @param lockMode the lock level + * @throws HibernateException + */ + public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException; + + /** + * Re-read the state of the given instance from the underlying database. It is + * inadvisable to use this to implement long-running sessions that span many + * business tasks. This method is, however, useful in certain special circumstances. + * For example + *

    + * + * @param object a persistent or detached instance + * @throws HibernateException + */ + public void refresh(Object object) throws HibernateException; + + /** + * Re-read the state of the given instance from the underlying database, with + * the given LockMode. It is inadvisable to use this to implement + * long-running sessions that span many business tasks. This method is, however, + * useful in certain special circumstances. + * + * @param object a persistent or detached instance + * @param lockMode the lock mode to use + * @throws HibernateException + */ + public void refresh(Object object, LockMode lockMode) throws HibernateException; + + /** + * Determine the current lock mode of the given object. + * + * @param object a persistent instance + * @return the current lock mode + * @throws HibernateException + */ + public LockMode getCurrentLockMode(Object object) throws HibernateException; + + /** + * Begin a unit of work and return the associated Transaction object. + * If a new underlying transaction is required, begin the transaction. Otherwise + * continue the new work in the context of the existing underlying transaction. + * The class of the returned Transaction object is determined by the + * property hibernate.transaction_factory. + * + * @return a Transaction instance + * @throws HibernateException + * @see Transaction + */ + public Transaction beginTransaction() throws HibernateException; + + /** + * Get the Transaction instance associated with this session. + * The class of the returned Transaction object is determined by the + * property hibernate.transaction_factory. + * + * @return a Transaction instance + * @throws HibernateException + * @see Transaction + */ + public Transaction getTransaction(); + + /** + * Create a new Criteria instance, for the given entity class, + * or a superclass of an entity class. + * + * @param persistentClass a class, which is persistent, or has persistent subclasses + * @return Criteria + */ + public Criteria createCriteria(Class persistentClass); + + /** + * Create a new Criteria instance, for the given entity class, + * or a superclass of an entity class, with the given alias. + * + * @param persistentClass a class, which is persistent, or has persistent subclasses + * @return Criteria + */ + public Criteria createCriteria(Class persistentClass, String alias); + + /** + * Create a new Criteria instance, for the given entity name. + * + * @param entityName + * @return Criteria + */ + public Criteria createCriteria(String entityName); + + /** + * Create a new Criteria instance, for the given entity name, + * with the given alias. + * + * @param entityName + * @return Criteria + */ + public Criteria createCriteria(String entityName, String alias); + + /** + * Create a new instance of Query for the given HQL query string. + * + * @param queryString a HQL query + * @return Query + * @throws HibernateException + */ + public Query createQuery(String queryString) throws HibernateException; + + /** + * Create a new instance of SQLQuery for the given SQL query string. + * + * @param queryString a SQL query + * @return SQLQuery + * @throws HibernateException + */ + public SQLQuery createSQLQuery(String queryString) throws HibernateException; + + /** + * Create a new instance of Query for the given collection and filter string. 
+ * + * @param collection a persistent collection + * @param queryString a Hibernate query + * @return Query + * @throws HibernateException + */ + public Query createFilter(Object collection, String queryString) throws HibernateException; + + /** + * Obtain an instance of Query for a named query string defined in the + * mapping file. + * + * @param queryName the name of a query defined externally + * @return Query + * @throws HibernateException + */ + public Query getNamedQuery(String queryName) throws HibernateException; + + /** + * Completely clear the session. Evict all loaded instances and cancel all pending + * saves, updates and deletions. Do not close open iterators or instances of + * ScrollableResults. + */ + public void clear(); + + /** + * Return the persistent instance of the given entity class with the given identifier, + * or null if there is no such persistent instance. (If the instance, or a proxy for the + * instance, is already associated with the session, return that instance or proxy.) + * + * @param clazz a persistent class + * @param id an identifier + * @return a persistent instance or null + * @throws HibernateException + */ + public Object get(Class clazz, Serializable id) throws HibernateException; + + /** + * Return the persistent instance of the given entity class with the given identifier, + * or null if there is no such persistent instance. Obtain the specified lock mode + * if the instance exists. + * + * @param clazz a persistent class + * @param id an identifier + * @param lockMode the lock mode + * @return a persistent instance or null + * @throws HibernateException + */ + public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException; + + /** + * Return the persistent instance of the given named entity with the given identifier, + * or null if there is no such persistent instance. (If the instance, or a proxy for the + * instance, is already associated with the session, return that instance or proxy.) + * + * @param entityName the entity name + * @param id an identifier + * @return a persistent instance or null + * @throws HibernateException + */ + public Object get(String entityName, Serializable id) throws HibernateException; + + /** + * Return the persistent instance of the given entity class with the given identifier, + * or null if there is no such persistent instance. Obtain the specified lock mode + * if the instance exists. + * + * @param entityName the entity name + * @param id an identifier + * @param lockMode the lock mode + * @return a persistent instance or null + * @throws HibernateException + */ + public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException; + + + /** + * Return the entity name for a persistent entity + * + * @param object a persistent entity + * @return the entity name + * @throws HibernateException + */ + public String getEntityName(Object object) throws HibernateException; + + /** + * Enable the named filter for this current session. + * + * @param filterName The name of the filter to be enabled. + * @return The Filter instance representing the enabled filter. + */ + public Filter enableFilter(String filterName); + + /** + * Retrieve a currently enabled filter by name. + * + * @param filterName The name of the filter to be retrieved. + * @return The Filter instance representing the enabled filter. + */ + public Filter getEnabledFilter(String filterName); + + /** + * Disable the named filter for the current session.
+ * + * @param filterName The name of the filter to be disabled. + */ + public void disableFilter(String filterName); + + /** + * Get the statistics for this session. + */ + public SessionStatistics getStatistics(); + + /** + * Set an unmodified persistent object to read-only mode, or a read-only + * object to modifiable mode. In read-only mode, no snapshot is maintained + * and the instance is never dirty checked. + * + * @see Query#setReadOnly(boolean) + */ + public void setReadOnly(Object entity, boolean readOnly); + + + + /** + * Disconnect the Session from the current JDBC connection. If + * the connection was obtained by Hibernate, close it and return it to + * the connection pool; otherwise, return it to the application. + *

+ * This is used by applications which supply JDBC connections to Hibernate + * and which require long sessions (or long conversations). + *
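+ * A minimal sketch of the pattern (the connection variable and the units of
+ * work are hypothetical, for illustration only):
+ * <pre>
+ * Session session = factory.openSession( myConnection );
+ * // ... first unit of work ...
+ * Connection conn = session.disconnect(); // hands back the application-supplied connection
+ * // ... conversation pauses; the session remains open ...
+ * session.reconnect( conn );
+ * // ... next unit of work ...
+ * session.close();
+ * </pre>
+ *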

+ * Note that disconnect() called on a session where the connection was + * retrieved by Hibernate through its configured + * {@link org.hibernate.connection.ConnectionProvider} has no effect, + * provided {@link ConnectionReleaseMode#ON_CLOSE} is not in effect. + * + * @return the application-supplied connection or null + * @see #reconnect(Connection) + * @see #reconnect() + */ + Connection disconnect() throws HibernateException; + + /** + * Obtain a new JDBC connection. This is used by applications which + * require long transactions and do not supply connections to the + * session. + * + * @see #disconnect() + * @deprecated Manual reconnection is only needed in the case of + * application-supplied connections, in which case the + * {@link #reconnect(java.sql.Connection)} method should be used. + */ + void reconnect() throws HibernateException; + + /** + * Reconnect to the given JDBC connection. This is used by applications + * which require long transactions and use application-supplied connections. + * + * @param connection a JDBC connection + * @see #disconnect() + */ + void reconnect(Connection connection) throws HibernateException; +} diff --git a/src/org/hibernate/SessionException.java b/src/org/hibernate/SessionException.java new file mode 100755 index 0000000000..c8955f60ea --- /dev/null +++ b/src/org/hibernate/SessionException.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate; + +/** + * Thrown when the user calls a method of a {@link Session} that is in an + * inappropriate state for the given call (for example, the session + * is closed or disconnected). + * + * @author Gavin King + */ +public class SessionException extends HibernateException { + + /** + * Constructs a new SessionException with the given message. + * + * @param message The message indicating the specific problem. + */ + public SessionException(String message) { + super( message ); + } + +} diff --git a/src/org/hibernate/SessionFactory.java b/src/org/hibernate/SessionFactory.java new file mode 100644 index 0000000000..a98b7141b2 --- /dev/null +++ b/src/org/hibernate/SessionFactory.java @@ -0,0 +1,224 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.sql.Connection; +import java.util.Map; +import java.util.Set; + +import javax.naming.Referenceable; + +import org.hibernate.metadata.ClassMetadata; +import org.hibernate.metadata.CollectionMetadata; +import org.hibernate.stat.Statistics; +import org.hibernate.engine.FilterDefinition; + +/** + * Creates Sessions. Usually an application has a single SessionFactory. + * Threads servicing client requests obtain Sessions from the factory.
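+ * A typical bootstrap looks roughly like the following (an illustrative
+ * sketch; the configuration style is an assumption, not mandated by this
+ * interface):
+ * <pre>
+ * SessionFactory sessionFactory = new Configuration()
+ *         .configure() // reads hibernate.cfg.xml from the classpath
+ *         .buildSessionFactory();
+ * Session session = sessionFactory.openSession();
+ * try {
+ *     // ... service a client request ...
+ * }
+ * finally {
+ *     session.close();
+ * }
+ * </pre>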
    + *
    + * Implementors must be threadsafe.
    + *
    + * SessionFactorys are immutable. The behaviour of a SessionFactory is + * controlled by properties supplied at configuration time. These properties are defined + * on Environment. + * + * @see Session + * @see org.hibernate.cfg.Environment + * @see org.hibernate.cfg.Configuration + * @see org.hibernate.connection.ConnectionProvider + * @see org.hibernate.transaction.TransactionFactory + * @author Gavin King + */ +public interface SessionFactory extends Referenceable, Serializable { + + /** + * Open a Session on the given connection. + *

+ * Note that the second-level cache will be disabled if you + * supply a JDBC connection. Hibernate will not be able to track + * any statements you might have executed in the same transaction. + * Consider implementing your own ConnectionProvider. + * + * @param connection a connection provided by the application. + * @return Session + */ + public org.hibernate.classic.Session openSession(Connection connection); + + /** + * Create a database connection and open a Session on it, specifying an + * interceptor. + * + * @param interceptor a session-scoped interceptor + * @return Session + * @throws HibernateException + */ + public org.hibernate.classic.Session openSession(Interceptor interceptor) throws HibernateException; + + /** + * Open a Session on the given connection, specifying an interceptor. + *

+ * Note that the second-level cache will be disabled if you + * supply a JDBC connection. Hibernate will not be able to track + * any statements you might have executed in the same transaction. + * Consider implementing your own ConnectionProvider. + * + * @param connection a connection provided by the application. + * @param interceptor a session-scoped interceptor + * @return Session + */ + public org.hibernate.classic.Session openSession(Connection connection, Interceptor interceptor); + + /** + * Create a database connection and open a Session on it. + * + * @return Session + * @throws HibernateException + */ + public org.hibernate.classic.Session openSession() throws HibernateException; + + /** + * Obtains the current session. The definition of what exactly "current" + * means is controlled by the {@link org.hibernate.context.CurrentSessionContext} impl configured + * for use. + *
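+ * For example, assuming a context strategy has been configured (typically
+ * via the hibernate.current_session_context_class property; sketch only):
+ * <pre>
+ * Session session = sessionFactory.getCurrentSession();
+ * session.beginTransaction();
+ * // ... work with the session; no explicit close(), the context manages the lifecycle ...
+ * session.getTransaction().commit();
+ * </pre>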

+ * Note that for backwards compatibility, if a {@link org.hibernate.context.CurrentSessionContext} + * is not configured but a JTA {@link org.hibernate.transaction.TransactionManagerLookup} + * is configured, this will default to the {@link org.hibernate.context.JTASessionContext} + * impl. + * + * @return The current session. + * @throws HibernateException Indicates an issue locating a suitable current session. + */ + public org.hibernate.classic.Session getCurrentSession() throws HibernateException; + + /** + * Get the ClassMetadata associated with the given entity class. + * + * @see org.hibernate.metadata.ClassMetadata + */ + public ClassMetadata getClassMetadata(Class persistentClass) throws HibernateException; + + /** + * Get the ClassMetadata associated with the given entity name. + * + * @see org.hibernate.metadata.ClassMetadata + * @since 3.0 + */ + public ClassMetadata getClassMetadata(String entityName) throws HibernateException; + + /** + * Get the CollectionMetadata associated with the named collection role. + * + * @see org.hibernate.metadata.CollectionMetadata + */ + public CollectionMetadata getCollectionMetadata(String roleName) throws HibernateException; + + + /** + * Get all ClassMetadata as a Map from entity name String + * to metadata object. + * + * @see org.hibernate.metadata.ClassMetadata + * @return a map from entity name (String) to ClassMetadata + * @since 3.0 changed key from Class to String + */ + public Map getAllClassMetadata() throws HibernateException; + + /** + * Get all CollectionMetadata as a Map from role name + * to metadata object. + * + * @see org.hibernate.metadata.CollectionMetadata + * @return a map from String to CollectionMetadata + */ + public Map getAllCollectionMetadata() throws HibernateException; + + /** + * Get the statistics for this session factory. + */ + public Statistics getStatistics(); + + /** + * Destroy this SessionFactory and release all resources (caches, + * connection pools, etc). It is the responsibility of the application + * to ensure that there are no open Sessions before calling + * close(). + */ + public void close() throws HibernateException; + + /** + * Was this SessionFactory already closed? + */ + public boolean isClosed(); + + /** + * Evict all entries for the given class from the second-level cache. This method occurs outside + * of any transaction; it performs an immediate "hard" remove, so does not respect + * any transaction isolation semantics of the usage strategy. Use with care. + */ + public void evict(Class persistentClass) throws HibernateException; + /** + * Evict an entry from the second-level cache. This method occurs outside + * of any transaction; it performs an immediate "hard" remove, so does not respect + * any transaction isolation semantics of the usage strategy. Use with care. + */ + public void evict(Class persistentClass, Serializable id) throws HibernateException; + /** + * Evict all entries for the named entity from the second-level cache. This method occurs outside + * of any transaction; it performs an immediate "hard" remove, so does not respect + * any transaction isolation semantics of the usage strategy. Use with care. + */ + public void evictEntity(String entityName) throws HibernateException; + /** + * Evict an entry from the second-level cache. This method occurs outside + * of any transaction; it performs an immediate "hard" remove, so does not respect + * any transaction isolation semantics of the usage strategy. Use with care.
+ */ + public void evictEntity(String entityName, Serializable id) throws HibernateException; + /** + * Evict all entries for the named collection role from the second-level cache. This method occurs outside + * of any transaction; it performs an immediate "hard" remove, so does not respect + * any transaction isolation semantics of the usage strategy. Use with care. + */ + public void evictCollection(String roleName) throws HibernateException; + /** + * Evict an entry from the second-level cache. This method occurs outside + * of any transaction; it performs an immediate "hard" remove, so does not respect + * any transaction isolation semantics of the usage strategy. Use with care. + */ + public void evictCollection(String roleName, Serializable id) throws HibernateException; + + /** + * Evict any query result sets cached in the default query cache region. + */ + public void evictQueries() throws HibernateException; + /** + * Evict any query result sets cached in the named query cache region. + */ + public void evictQueries(String cacheRegion) throws HibernateException; + /** + * Get a new stateless session. + */ + public StatelessSession openStatelessSession(); + /** + * Get a new stateless session for the given JDBC connection. + */ + public StatelessSession openStatelessSession(Connection connection); + + /** + * Obtain a set of the names of all filters defined on this SessionFactory. + * + * @return The set of filter names. + */ + public Set getDefinedFilterNames(); + + /** + * Obtain the definition of a filter by name. + * + * @param filterName The name of the filter for which to obtain the definition. + * @return The filter definition. + * @throws HibernateException If no filter is defined with the given name. + */ + public FilterDefinition getFilterDefinition(String filterName) throws HibernateException; +} diff --git a/src/org/hibernate/StaleObjectStateException.java b/src/org/hibernate/StaleObjectStateException.java new file mode 100644 index 0000000000..851ab380c8 --- /dev/null +++ b/src/org/hibernate/StaleObjectStateException.java @@ -0,0 +1,45 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; + +import org.hibernate.pretty.MessageHelper; + +/** + * A StaleStateException that carries information + * about a particular entity instance that was the source + * of the failure. + * + * @author Gavin King + */ +public class StaleObjectStateException extends StaleStateException { + private final String entityName; + private final Serializable identifier; + + public StaleObjectStateException(String persistentClass, Serializable identifier) { + super("Row was updated or deleted by another transaction (or unsaved-value mapping was incorrect)"); + this.entityName = persistentClass; + this.identifier = identifier; + } + + public String getEntityName() { + return entityName; + } + + public Serializable getIdentifier() { + return identifier; + } + + public String getMessage() { + return super.getMessage() + ": " + + MessageHelper.infoString(entityName, identifier); + } + +} + + + + + + + diff --git a/src/org/hibernate/StaleStateException.java b/src/org/hibernate/StaleStateException.java new file mode 100755 index 0000000000..310cb294dc --- /dev/null +++ b/src/org/hibernate/StaleStateException.java @@ -0,0 +1,21 @@ +//$Id$ +package org.hibernate; + +/** + * Thrown when a version number or timestamp check failed, indicating that the + * Session contained stale data (when using long transactions + * with versioning). Also occurs if we try to delete or update a row that does + * not exist.
    + *
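+ * A typical handling sketch (the session, transaction and detached instance
+ * are hypothetical):
+ * <pre>
+ * try {
+ *     session.update( detachedItem ); // version is checked against the row at flush
+ *     tx.commit();
+ * }
+ * catch (StaleObjectStateException sose) {
+ *     tx.rollback();
+ *     // tell the user the data was modified concurrently, then retry or abandon
+ * }
+ * </pre>
+ *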
    + * Note that this exception often indicates that the user failed to specify the + * correct unsaved-value strategy for a class! + * + * @see StaleObjectStateException + * @author Gavin King + */ +public class StaleStateException extends HibernateException { + + public StaleStateException(String s) { + super(s); + } +} diff --git a/src/org/hibernate/StatelessSession.java b/src/org/hibernate/StatelessSession.java new file mode 100755 index 0000000000..118f2fcb22 --- /dev/null +++ b/src/org/hibernate/StatelessSession.java @@ -0,0 +1,217 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; +import java.sql.Connection; + +/** + * A command-oriented API for performing bulk operations + * against a database.
    + *
    + * A stateless session does not implement a first-level cache nor + * interact with any second-level cache, nor does it implement + * transactional write-behind or automatic dirty checking, nor do + * operations cascade to associated instances. Collections are + * ignored by a stateless session. Operations performed via a + * stateless session bypass Hibernate's event model and + * interceptors. Stateless sessions are vulnerable to data + * aliasing effects, due to the lack of a first-level cache.
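+ * A bulk-processing sketch (the entity and its property are hypothetical):
+ * <pre>
+ * StatelessSession ss = sessionFactory.openStatelessSession();
+ * Transaction tx = ss.beginTransaction();
+ * for ( int i = 0; i < 100000; i++ ) {
+ *     Customer c = new Customer();
+ *     c.setName( "customer-" + i );
+ *     ss.insert( c ); // executed immediately; nothing is queued, cached or cascaded
+ * }
+ * tx.commit();
+ * ss.close();
+ * </pre>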
    + *
    + * For certain kinds of transactions, a stateless session may + * perform slightly faster than a stateful session. + * + * @author Gavin King + */ +public interface StatelessSession extends Serializable { + /** + * Close the stateless session and release the JDBC connection. + */ + public void close(); + + /** + * Insert a row. + * + * @param entity a new transient instance + */ + public Serializable insert(Object entity); + + /** + * Insert a row. + * + * @param entityName The entityName for the entity to be inserted + * @param entity a new transient instance + * @return the identifier of the instance + */ + public Serializable insert(String entityName, Object entity); + + /** + * Update a row. + * + * @param entity a detached entity instance + */ + public void update(Object entity); + + /** + * Update a row. + * + * @param entityName The entityName for the entity to be updated + * @param entity a detached entity instance + */ + public void update(String entityName, Object entity); + + /** + * Delete a row. + * + * @param entity a detached entity instance + */ + public void delete(Object entity); + + /** + * Delete a row. + * + * @param entityName The entityName for the entity to be deleted + * @param entity a detached entity instance + */ + public void delete(String entityName, Object entity); + + /** + * Retrieve a row. + * + * @return a detached entity instance + */ + public Object get(String entityName, Serializable id); + + /** + * Retrieve a row. + * + * @return a detached entity instance + */ + public Object get(Class entityClass, Serializable id); + + /** + * Retrieve a row, obtaining the specified lock mode. + * + * @return a detached entity instance + */ + public Object get(String entityName, Serializable id, LockMode lockMode); + + /** + * Retrieve a row, obtaining the specified lock mode. + * + * @return a detached entity instance + */ + public Object get(Class entityClass, Serializable id, LockMode lockMode); + + /** + * Refresh the entity instance state from the database. + * + * @param entity The entity to be refreshed. + */ + public void refresh(Object entity); + + /** + * Refresh the entity instance state from the database. + * + * @param entityName The entityName for the entity to be refreshed. + * @param entity The entity to be refreshed. + */ + public void refresh(String entityName, Object entity); + + /** + * Refresh the entity instance state from the database. + * + * @param entity The entity to be refreshed. + * @param lockMode The LockMode to be applied. + */ + public void refresh(Object entity, LockMode lockMode); + + /** + * Refresh the entity instance state from the database. + * + * @param entityName The entityName for the entity to be refreshed. + * @param entity The entity to be refreshed. + * @param lockMode The LockMode to be applied. + */ + public void refresh(String entityName, Object entity, LockMode lockMode); + + /** + * Create a new instance of Query for the given HQL query string. + * Entities returned by the query are detached. + */ + public Query createQuery(String queryString); + + /** + * Obtain an instance of Query for a named query string defined in + * the mapping file. Entities returned by the query are detached. + */ + public Query getNamedQuery(String queryName); + + /** + * Create a new Criteria instance, for the given entity class, + * or a superclass of an entity class. Entities returned by the query are + * detached. 
+ * + * @param persistentClass a class, which is persistent, or has persistent subclasses + * @return Criteria + */ + public Criteria createCriteria(Class persistentClass); + + /** + * Create a new Criteria instance, for the given entity class, + * or a superclass of an entity class, with the given alias. + * Entities returned by the query are detached. + * + * @param persistentClass a class, which is persistent, or has persistent subclasses + * @return Criteria + */ + public Criteria createCriteria(Class persistentClass, String alias); + + /** + * Create a new Criteria instance, for the given entity name. + * Entities returned by the query are detached. + * + * @param entityName + * @return Criteria + */ + public Criteria createCriteria(String entityName); + + /** + * Create a new Criteria instance, for the given entity name, + * with the given alias. Entities returned by the query are detached. + * + * @param entityName + * @return Criteria + */ + public Criteria createCriteria(String entityName, String alias); + + /** + * Create a new instance of SQLQuery for the given SQL query string. + * Entities returned by the query are detached. + * + * @param queryString a SQL query + * @return SQLQuery + * @throws HibernateException + */ + public SQLQuery createSQLQuery(String queryString) throws HibernateException; + + /** + * Begin a Hibernate transaction. + */ + public Transaction beginTransaction(); + + /** + * Get the current Hibernate transaction. + */ + public Transaction getTransaction(); + + /** + * Returns the current JDBC connection associated with this + * instance.
    + *
+ * If the session is using aggressive connection release (as in a + * CMT environment), it is the application's responsibility to + * close the connection returned by this call. Otherwise, the + * application should not close the connection. + */ + public Connection connection(); +} diff --git a/src/org/hibernate/Transaction.java b/src/org/hibernate/Transaction.java new file mode 100644 index 0000000000..238d271be7 --- /dev/null +++ b/src/org/hibernate/Transaction.java @@ -0,0 +1,105 @@ +//$Id$ +package org.hibernate; + +import javax.transaction.Synchronization; + +/** + * Allows the application to define units of work, while + * maintaining abstraction from the underlying transaction + * implementation (e.g. JTA, JDBC).
    + *
    + * A transaction is associated with a Session and is + * usually instantiated by a call to Session.beginTransaction(). + * A single session might span multiple transactions since + * the notion of a session (a conversation between the application + * and the datastore) is of coarser granularity than the notion of + * a transaction. However, it is intended that there be at most one + * uncommitted Transaction associated with a particular + * Session at any time.
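+ * The canonical demarcation idiom (a sketch; production code would catch a
+ * narrower exception type):
+ * <pre>
+ * Transaction tx = null;
+ * try {
+ *     tx = session.beginTransaction();
+ *     // ... do some work ...
+ *     tx.commit();
+ * }
+ * catch (RuntimeException e) {
+ *     if ( tx != null ) tx.rollback();
+ *     throw e;
+ * }
+ * </pre>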
    + *
+ * Implementors are not intended to be threadsafe. + * + * @see Session#beginTransaction() + * @see org.hibernate.transaction.TransactionFactory + * @author Anton van Straaten + */ +public interface Transaction { + + /** + * Begin a new transaction. + */ + public void begin() throws HibernateException; + + /** + * Flush the associated Session and end the unit of work (unless + * we are in {@link FlushMode#NEVER}). + *

    + * This method will commit the underlying transaction if and only + * if the underlying transaction was initiated by this object. + * + * @throws HibernateException + */ + public void commit() throws HibernateException; + + /** + * Force the underlying transaction to roll back. + * + * @throws HibernateException + */ + public void rollback() throws HibernateException; + + /** + * Was this transaction rolled back or set to rollback only? + *

    + * This only accounts for actions initiated from this local transaction. + * If, for example, the underlying transaction is forced to rollback via + * some other means, this method still reports false because the rollback + * was not initiated from here. + * + * @return boolean True if the transaction was rolled back via this + * local transaction; false otherwise. + * @throws HibernateException + */ + public boolean wasRolledBack() throws HibernateException; + + /** + * Check if this transaction was successfully committed. + *

    + * This method could return false even after successful invocation + * of {@link #commit}. As an example, JTA based strategies no-op on + * {@link #commit} calls if they did not start the transaction; in that case, + * they also report {@link #wasCommitted} as false. + * + * @return boolean True if the transaction was (unequivocally) committed + * via this local transaction; false otherwise. + * @throws HibernateException + */ + public boolean wasCommitted() throws HibernateException; + + /** + * Is this transaction still active? + *

+ * Again, this only returns information in relation to the + * local transaction, not the actual underlying transaction. + * + * @return boolean True if this local transaction is still active. + */ + public boolean isActive() throws HibernateException; + + /** + * Register a user synchronization callback for this transaction. + * + * @param synchronization The Synchronization callback to register. + * @throws HibernateException + */ + public void registerSynchronization(Synchronization synchronization) + throws HibernateException; + + /** + * Set the transaction timeout for any transaction started by + * a subsequent call to begin() on this instance. + * + * @param seconds The number of seconds before a timeout. + */ + public void setTimeout(int seconds); +} diff --git a/src/org/hibernate/TransactionException.java b/src/org/hibernate/TransactionException.java new file mode 100644 index 0000000000..8b8ae6bcff --- /dev/null +++ b/src/org/hibernate/TransactionException.java @@ -0,0 +1,28 @@ +//$Id$ +package org.hibernate; + +/** + * Indicates that a transaction could not be begun, committed + * or rolled back. + * + * @see Transaction + * @author Anton van Straaten + */ + +public class TransactionException extends HibernateException { + + public TransactionException(String message, Exception root) { + super(message,root); + } + + public TransactionException(String message) { + super(message); + } + +} + + + + + + diff --git a/src/org/hibernate/TransientObjectException.java b/src/org/hibernate/TransientObjectException.java new file mode 100644 index 0000000000..020fa3cfb5 --- /dev/null +++ b/src/org/hibernate/TransientObjectException.java @@ -0,0 +1,23 @@ +//$Id$ +package org.hibernate; + +/** + * Thrown when the user passes a transient instance to a Session + * method that expects a persistent instance. + * + * @author Gavin King + */ + +public class TransientObjectException extends HibernateException { + + public TransientObjectException(String s) { + super(s); + } + +} + + + + + + diff --git a/src/org/hibernate/TypeMismatchException.java b/src/org/hibernate/TypeMismatchException.java new file mode 100644 index 0000000000..0434eb8e1b --- /dev/null +++ b/src/org/hibernate/TypeMismatchException.java @@ -0,0 +1,21 @@ +//$Id: $ +package org.hibernate; + +/** + * Used when a user-provided type does not match the expected one. + * + * @author Emmanuel Bernard + */ +public class TypeMismatchException extends HibernateException { + public TypeMismatchException(Throwable root) { + super( root ); + } + + public TypeMismatchException(String s) { + super( s ); + } + + public TypeMismatchException(String string, Throwable root) { + super( string, root ); + } +} diff --git a/src/org/hibernate/UnresolvableObjectException.java b/src/org/hibernate/UnresolvableObjectException.java new file mode 100644 index 0000000000..a0c31e7f6c --- /dev/null +++ b/src/org/hibernate/UnresolvableObjectException.java @@ -0,0 +1,52 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; + +import org.hibernate.pretty.MessageHelper; + +/** + * Thrown when Hibernate could not resolve an object by id, especially when + * loading an association.
+ * + * @author Gavin King + */ +public class UnresolvableObjectException extends HibernateException { + + private final Serializable identifier; + private final String entityName; + + public UnresolvableObjectException(Serializable identifier, String clazz) { + this("No row with the given identifier exists", identifier, clazz); + } + UnresolvableObjectException(String message, Serializable identifier, String clazz) { + super(message); + this.identifier = identifier; + this.entityName = clazz; + } + public Serializable getIdentifier() { + return identifier; + } + + public String getMessage() { + return super.getMessage() + ": " + + MessageHelper.infoString(entityName, identifier); + } + + public String getEntityName() { + return entityName; + } + + public static void throwIfNull(Object o, Serializable id, String clazz) + throws UnresolvableObjectException { + if (o==null) throw new UnresolvableObjectException(id, clazz); + } + +} + + + + + + + diff --git a/src/org/hibernate/WrongClassException.java b/src/org/hibernate/WrongClassException.java new file mode 100644 index 0000000000..f898a3d5b2 --- /dev/null +++ b/src/org/hibernate/WrongClassException.java @@ -0,0 +1,47 @@ +//$Id$ +package org.hibernate; + +import java.io.Serializable; + +/** + * Thrown when Session.load() selects a row with + * the given primary key (identifier value) but the row's + * discriminator value specifies a subclass that is not + * assignable to the class requested by the user. + * + * @author Gavin King + */ +public class WrongClassException extends HibernateException { + + private final Serializable identifier; + private final String entityName; + + public WrongClassException(String msg, Serializable identifier, String clazz) { + super(msg); + this.identifier = identifier; + this.entityName = clazz; + } + public Serializable getIdentifier() { + return identifier; + } + + public String getMessage() { + return "Object with id: " + + identifier + + " was not of the specified subclass: " + + entityName + + " (" + super.getMessage() + ")" ; + } + + public String getEntityName() { + return entityName; + } + +} + + + + + + + diff --git a/src/org/hibernate/action/BulkOperationCleanupAction.java b/src/org/hibernate/action/BulkOperationCleanupAction.java new file mode 100644 index 0000000000..1d16ba2105 --- /dev/null +++ b/src/org/hibernate/action/BulkOperationCleanupAction.java @@ -0,0 +1,151 @@ +// $Id$ +package org.hibernate.action; + +import org.hibernate.HibernateException; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; +import java.util.Iterator; +import java.util.HashSet; +import java.util.ArrayList; + +/** + * Implementation of BulkOperationCleanupAction. 
+ * + * @author Steve Ebersole + */ +public class BulkOperationCleanupAction implements Executable, Serializable { + + private final SessionImplementor session; + + private final Set affectedEntityNames = new HashSet(); + private final Set affectedCollectionRoles = new HashSet(); + private final Serializable[] spaces; + + public BulkOperationCleanupAction(SessionImplementor session, Queryable[] affectedQueryables) { + this.session = session; + // TODO : probably better to calculate these and pass them in, as it'll be more performant + ArrayList tmpSpaces = new ArrayList(); + for ( int i = 0; i < affectedQueryables.length; i++ ) { + if ( affectedQueryables[i].hasCache() ) { + affectedEntityNames.add( affectedQueryables[i].getEntityName() ); + } + Set roles = session.getFactory().getCollectionRolesByEntityParticipant( affectedQueryables[i].getEntityName() ); + if ( roles != null ) { + affectedCollectionRoles.addAll( roles ); + } + for ( int y = 0; y < affectedQueryables[i].getQuerySpaces().length; y++ ) { + tmpSpaces.add( affectedQueryables[i].getQuerySpaces()[y] ); + } + } + this.spaces = new Serializable[ tmpSpaces.size() ]; + for ( int i = 0; i < tmpSpaces.size(); i++ ) { + this.spaces[i] = ( Serializable ) tmpSpaces.get( i ); + } + } + + /** Create an action that will evict collection and entity regions based on query spaces (table names). + * TODO: cache the autodetected information and pass it in instead. + **/ + public BulkOperationCleanupAction(SessionImplementor session, Set querySpaces) { + this.session = session; + + Set tmpSpaces = new HashSet(querySpaces); + SessionFactoryImplementor factory = session.getFactory(); + Iterator iterator = factory.getAllClassMetadata().entrySet().iterator(); + while ( iterator.hasNext() ) { + Map.Entry entry = (Map.Entry) iterator.next(); + String entityName = (String) entry.getKey(); + EntityPersister persister = factory.getEntityPersister( entityName ); + Serializable[] entitySpaces = persister.getQuerySpaces(); + + if (affectedEntity( querySpaces, entitySpaces )) { + if ( persister.hasCache() ) { + affectedEntityNames.add( persister.getEntityName() ); + } + Set roles = session.getFactory().getCollectionRolesByEntityParticipant( persister.getEntityName() ); + if ( roles != null ) { + affectedCollectionRoles.addAll( roles ); + } + for ( int y = 0; y < entitySpaces.length; y++ ) { + tmpSpaces.add( entitySpaces[y] ); + } + } + + } + this.spaces = (Serializable[]) tmpSpaces.toArray( new Serializable[tmpSpaces.size()] ); + } + + + /** Returns true if there are no query spaces, or if one of the entity's query spaces matches. */ + private boolean affectedEntity(Set querySpaces, Serializable[] entitySpaces) { + if(querySpaces==null || querySpaces.isEmpty()) { + return true; + } + + for ( int i = 0; i < entitySpaces.length; i++ ) { + if ( querySpaces.contains( entitySpaces[i] ) ) { + return true; + } + } + return false; + } + + public void init() { + evictEntityRegions(); + evictCollectionRegions(); + } + + public boolean hasAfterTransactionCompletion() { + return true; + } + + public void afterTransactionCompletion(boolean success) throws HibernateException { + /////////////////////////////////////////////////////////////////////// + // HACK ALERT!!!!!
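+ // Rationale (assumed; the original comment does not say): the TreeCache-based
+ // providers appear to coordinate invalidation through the transactional cache
+ // itself, so the manual region eviction below is skipped for them.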
+ if ( session.getFactory().getSettings().getCacheProvider() instanceof org.hibernate.cache.OptimisticTreeCacheProvider + || session.getFactory().getSettings().getCacheProvider() instanceof org.hibernate.cache.TreeCacheProvider ) { + return; + } + /////////////////////////////////////////////////////////////////////// + evictEntityRegions(); + evictCollectionRegions(); + } + + public Serializable[] getPropertySpaces() { + return spaces; + } + + public void beforeExecutions() throws HibernateException { + // nothing to do + } + + public void execute() throws HibernateException { + // nothing to do + } + + private void evictEntityRegions() { + if ( affectedEntityNames != null ) { + Iterator itr = affectedEntityNames.iterator(); + while ( itr.hasNext() ) { + final String entityName = ( String ) itr.next(); + session.getFactory().evictEntity( entityName ); + } + } + } + + private void evictCollectionRegions() { + if ( affectedCollectionRoles != null ) { + Iterator itr = affectedCollectionRoles.iterator(); + while ( itr.hasNext() ) { + final String roleName = ( String ) itr.next(); + session.getFactory().evictCollection( roleName ); + } + } + } +} diff --git a/src/org/hibernate/action/CollectionAction.java b/src/org/hibernate/action/CollectionAction.java new file mode 100644 index 0000000000..d2f43fee23 --- /dev/null +++ b/src/org/hibernate/action/CollectionAction.java @@ -0,0 +1,151 @@ +//$Id$ +package org.hibernate.action; + +import org.hibernate.cache.CacheConcurrencyStrategy.SoftLock; +import org.hibernate.cache.CacheException; +import org.hibernate.cache.CacheKey; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.util.StringHelper; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.Serializable; + +/** + * Any action relating to insert/update/delete of a collection + * @author Gavin King + */ +public abstract class CollectionAction implements Executable, Serializable, Comparable { + + private transient CollectionPersister persister; + private final Serializable key; + private Serializable finalKey; + private final SessionImplementor session; + private SoftLock lock; + private final String collectionRole; + private final PersistentCollection collection; + + public CollectionAction( + final CollectionPersister persister, + final PersistentCollection collection, + final Serializable key, + final SessionImplementor session) + throws CacheException { + this.persister = persister; + this.session = session; + this.key = key; + this.collectionRole = persister.getRole(); + this.collection = collection; + } + + protected PersistentCollection getCollection() { + return collection; + } + + private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException { + ois.defaultReadObject(); + persister = session.getFactory().getCollectionPersister( collectionRole ); + } + + public void afterTransactionCompletion(boolean success) throws CacheException { + if ( persister.hasCache() ) { + final CacheKey ck = new CacheKey( + key, + persister.getKeyType(), + persister.getRole(), + session.getEntityMode(), + session.getFactory() + ); + persister.getCache().release(ck, lock); + } + } + + public boolean hasAfterTransactionCompletion() { + return persister.hasCache(); + } + + public Serializable[] getPropertySpaces() { + return persister.getCollectionSpaces(); + } + + protected final 
CollectionPersister getPersister() { + return persister; + } + + protected final Serializable getKey() { + finalKey = key; + if ( key instanceof DelayedPostInsertIdentifier ) { + // need to look it up from the persistence-context + finalKey = session.getPersistenceContext().getEntry( collection.getOwner() ).getId(); + if ( finalKey == key ) { + // we may be screwed here since the collection action is about to execute + // and we do not know the final owner key value + } + } + return finalKey; + } + + protected final SessionImplementor getSession() { + return session; + } + + public final void beforeExecutions() throws CacheException { + // we need to obtain the lock before any actions are + // executed, since this may be an inverse="true" + // bidirectional association and it is one of the + // earlier entity actions which actually updates + // the database (this action is responsible for + // second-level cache invalidation only) + if ( persister.hasCache() ) { + final CacheKey ck = new CacheKey( + key, + persister.getKeyType(), + persister.getRole(), + session.getEntityMode(), + session.getFactory() + ); + lock = persister.getCache().lock(ck, null); + } + } + + protected final void evict() throws CacheException { + if ( persister.hasCache() ) { + CacheKey ck = new CacheKey( + key, + persister.getKeyType(), + persister.getRole(), + session.getEntityMode(), + session.getFactory() + ); + persister.getCache().evict(ck); + } + } + + public String toString() { + return StringHelper.unqualify( getClass().getName() ) + + MessageHelper.infoString( collectionRole, key ); + } + + public int compareTo(Object other) { + CollectionAction action = ( CollectionAction ) other; + //sort first by role name + int roleComparison = collectionRole.compareTo( action.collectionRole ); + if ( roleComparison != 0 ) { + return roleComparison; + } + else { + //then by fk + return persister.getKeyType() + .compare( key, action.key, session.getEntityMode() ); + } + } +} + + + + + + diff --git a/src/org/hibernate/action/CollectionRecreateAction.java b/src/org/hibernate/action/CollectionRecreateAction.java new file mode 100644 index 0000000000..e0e65df2db --- /dev/null +++ b/src/org/hibernate/action/CollectionRecreateAction.java @@ -0,0 +1,47 @@ +//$Id$ +package org.hibernate.action; + +import org.hibernate.HibernateException; +import org.hibernate.cache.CacheException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; + +import java.io.Serializable; + +public final class CollectionRecreateAction extends CollectionAction { + + public CollectionRecreateAction( + final PersistentCollection collection, + final CollectionPersister persister, + final Serializable id, + final SessionImplementor session) + throws CacheException { + super( persister, collection, id, session ); + } + + public void execute() throws HibernateException { + final PersistentCollection collection = getCollection(); + + getPersister().recreate( collection, getKey(), getSession() ); + + getSession().getPersistenceContext() + .getCollectionEntry(collection) + .afterAction(collection); + + evict(); + + if ( getSession().getFactory().getStatistics().isStatisticsEnabled() ) { + getSession().getFactory().getStatisticsImplementor() + .recreateCollection( getPersister().getRole() ); + } + } + +} + + + + + + + diff --git a/src/org/hibernate/action/CollectionRemoveAction.java b/src/org/hibernate/action/CollectionRemoveAction.java new file mode
100644 index 0000000000..4ea78f5390 --- /dev/null +++ b/src/org/hibernate/action/CollectionRemoveAction.java @@ -0,0 +1,53 @@ +//$Id$ +package org.hibernate.action; + +import org.hibernate.HibernateException; +import org.hibernate.cache.CacheException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; + +import java.io.Serializable; + +public final class CollectionRemoveAction extends CollectionAction { + + private boolean emptySnapshot; + + public CollectionRemoveAction( + final PersistentCollection collection, + final CollectionPersister persister, + final Serializable id, + final boolean emptySnapshot, + final SessionImplementor session) + throws CacheException { + super( persister, collection, id, session ); + this.emptySnapshot = emptySnapshot; + } + + public void execute() throws HibernateException { + if ( !emptySnapshot ) getPersister().remove( getKey(), getSession() ); + + final PersistentCollection collection = getCollection(); + if (collection!=null) { + getSession().getPersistenceContext() + .getCollectionEntry(collection) + .afterAction(collection); + } + + evict(); + + if ( getSession().getFactory().getStatistics().isStatisticsEnabled() ) { + getSession().getFactory().getStatisticsImplementor() + .removeCollection( getPersister().getRole() ); + } + } + + +} + + + + + + + diff --git a/src/org/hibernate/action/CollectionUpdateAction.java b/src/org/hibernate/action/CollectionUpdateAction.java new file mode 100644 index 0000000000..91c714f408 --- /dev/null +++ b/src/org/hibernate/action/CollectionUpdateAction.java @@ -0,0 +1,78 @@ +//$Id$ +package org.hibernate.action; + +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.cache.CacheException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; + +import java.io.Serializable; + +public final class CollectionUpdateAction extends CollectionAction { + + private final boolean emptySnapshot; + + public CollectionUpdateAction( + final PersistentCollection collection, + final CollectionPersister persister, + final Serializable id, + final boolean emptySnapshot, + final SessionImplementor session) + throws CacheException { + super( persister, collection, id, session ); + this.emptySnapshot = emptySnapshot; + } + + public void execute() throws HibernateException { + final Serializable id = getKey(); + final SessionImplementor session = getSession(); + final CollectionPersister persister = getPersister(); + final PersistentCollection collection = getCollection(); + boolean affectedByFilters = persister.isAffectedByEnabledFilters(session); + + if ( !collection.wasInitialized() ) { + if ( !collection.hasQueuedOperations() ) throw new AssertionFailure( "no queued adds" ); + //do nothing - we only need to notify the cache... 
+ } + else if ( !affectedByFilters && collection.empty() ) { + if ( !emptySnapshot ) persister.remove( id, session ); + } + else if ( collection.needsRecreate(persister) ) { + if (affectedByFilters) { + throw new HibernateException( + "cannot recreate collection while filter is enabled: " + + MessageHelper.collectionInfoString( persister, id, persister.getFactory() ) + ); + } + if ( !emptySnapshot ) persister.remove( id, session ); + persister.recreate( collection, id, session ); + } + else { + persister.deleteRows( collection, id, session ); + persister.updateRows( collection, id, session ); + persister.insertRows( collection, id, session ); + } + + getSession().getPersistenceContext() + .getCollectionEntry(collection) + .afterAction(collection); + + evict(); + + if ( getSession().getFactory().getStatistics().isStatisticsEnabled() ) { + getSession().getFactory().getStatisticsImplementor(). + updateCollection( getPersister().getRole() ); + } + } + +} + + + + + + + diff --git a/src/org/hibernate/action/DelayedPostInsertIdentifier.java b/src/org/hibernate/action/DelayedPostInsertIdentifier.java new file mode 100644 index 0000000000..af9715089b --- /dev/null +++ b/src/org/hibernate/action/DelayedPostInsertIdentifier.java @@ -0,0 +1,49 @@ +package org.hibernate.action; + +import java.io.Serializable; + +/** + * Acts as a stand-in for an entity identifier which is supposed to be + * generated on insert (like an IDENTITY column) where the insert needed to + * be delayed because we were outside a transaction when the persist + * occurred (save currently still performs the insert). + *

+ * The stand-in is only used within the {@link org.hibernate.engine.PersistenceContext} + * in order to distinguish one instance from another; it is never injected into + * the entity instance or returned to the client... + * + * @author Steve Ebersole + */ +public class DelayedPostInsertIdentifier implements Serializable { + private static long SEQUENCE = 0; + private final long sequence; + + public DelayedPostInsertIdentifier() { + synchronized( DelayedPostInsertIdentifier.class ) { + if ( SEQUENCE == Long.MAX_VALUE ) { + SEQUENCE = 0; + } + this.sequence = SEQUENCE++; + } + } + + public boolean equals(Object o) { + if ( this == o ) { + return true; + } + if ( o == null || getClass() != o.getClass() ) { + return false; + } + final DelayedPostInsertIdentifier that = ( DelayedPostInsertIdentifier ) o; + return sequence == that.sequence; + } + + public int hashCode() { + return ( int ) ( sequence ^ ( sequence >>> 32 ) ); + } + + public String toString() { + return "<delayed:" + sequence + ">"; + + } +} diff --git a/src/org/hibernate/action/EntityAction.java b/src/org/hibernate/action/EntityAction.java new file mode 100644 index 0000000000..47b53f24dd --- /dev/null +++ b/src/org/hibernate/action/EntityAction.java @@ -0,0 +1,136 @@ +//$Id$ +package org.hibernate.action; + +import org.hibernate.AssertionFailure; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.util.StringHelper; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.Serializable; + +/** + * Base class for actions relating to insert/update/delete of an entity + * instance. + * + * @author Gavin King + */ +public abstract class EntityAction implements Executable, Serializable, Comparable { + + private final String entityName; + private final Serializable id; + private final Object instance; + private final SessionImplementor session; + + private transient EntityPersister persister; + + /** + * Instantiate an action. + * + * @param session The session from which this action is coming. + * @param id The id of the entity + * @param instance The entity instance + * @param persister The entity persister + */ + protected EntityAction(SessionImplementor session, Serializable id, Object instance, EntityPersister persister) { + this.entityName = persister.getEntityName(); + this.id = id; + this.instance = instance; + this.session = session; + this.persister = persister; + } + + protected abstract boolean hasPostCommitEventListeners(); + + /** + * entity name accessor + * + * @return The entity name + */ + public String getEntityName() { + return entityName; + } + + /** + * entity id accessor + * + * @return The entity id + */ + public final Serializable getId() { + if ( id instanceof DelayedPostInsertIdentifier ) { + return session.getPersistenceContext().getEntry( instance ).getId(); + } + return id; + } + + /** + * entity instance accessor + * + * @return The entity instance + */ + public final Object getInstance() { + return instance; + } + + /** + * originating session accessor + * + * @return The session from which this action originated.
+ */ + public final SessionImplementor getSession() { + return session; + } + + /** + * entity persister accessor + * + * @return The entity persister + */ + public final EntityPersister getPersister() { + return persister; + } + + public final Serializable[] getPropertySpaces() { + return persister.getPropertySpaces(); + } + + public void beforeExecutions() { + throw new AssertionFailure( "beforeExecutions() called for non-collection action" ); + } + + public boolean hasAfterTransactionCompletion() { + return persister.hasCache() || hasPostCommitEventListeners(); + } + + public String toString() { + return StringHelper.unqualify( getClass().getName() ) + MessageHelper.infoString( entityName, id ); + } + + public int compareTo(Object other) { + EntityAction action = ( EntityAction ) other; + //sort first by entity name + int roleComparison = entityName.compareTo( action.entityName ); + if ( roleComparison != 0 ) { + return roleComparison; + } + else { + //then by id + return persister.getIdentifierType().compare( id, action.id, session.getEntityMode() ); + } + } + + /** + * Serialization... + * + * @param ois The object stream + * @throws IOException Problem performing the default stream reading + * @throws ClassNotFoundException Problem performing the default stream reading + */ + private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException { + ois.defaultReadObject(); + persister = session.getFactory().getEntityPersister( entityName ); + } +} + diff --git a/src/org/hibernate/action/EntityDeleteAction.java b/src/org/hibernate/action/EntityDeleteAction.java new file mode 100644 index 0000000000..50ad219627 --- /dev/null +++ b/src/org/hibernate/action/EntityDeleteAction.java @@ -0,0 +1,174 @@ +//$Id$ +package org.hibernate.action; + +import java.io.Serializable; + +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.cache.CacheKey; +import org.hibernate.cache.CacheConcurrencyStrategy.SoftLock; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.PersistenceContext; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.event.PostDeleteEvent; +import org.hibernate.event.PostDeleteEventListener; +import org.hibernate.event.PreDeleteEvent; +import org.hibernate.event.PreDeleteEventListener; +import org.hibernate.event.EventSource; +import org.hibernate.persister.entity.EntityPersister; + +public final class EntityDeleteAction extends EntityAction { + + private final Object version; + private SoftLock lock; + private final boolean isCascadeDeleteEnabled; + private final Object[] state; + + public EntityDeleteAction( + final Serializable id, + final Object[] state, + final Object version, + final Object instance, + final EntityPersister persister, + final boolean isCascadeDeleteEnabled, + final SessionImplementor session) { + super( session, id, instance, persister ); + this.version = version; + this.isCascadeDeleteEnabled = isCascadeDeleteEnabled; + this.state = state; + } + + public void execute() throws HibernateException { + Serializable id = getId(); + EntityPersister persister = getPersister(); + SessionImplementor session = getSession(); + Object instance = getInstance(); + + boolean veto = preDelete(); + + Object version = this.version; + if ( persister.isVersionPropertyGenerated() ) { + // we need to grab the version value from the entity, otherwise + // we have issues with generated-version entities that may have + // multiple
actions queued during the same flush + version = persister.getVersion( instance, session.getEntityMode() ); + } + + final CacheKey ck; + if ( persister.hasCache() ) { + ck = new CacheKey( + id, + persister.getIdentifierType(), + persister.getRootEntityName(), + session.getEntityMode(), + session.getFactory() + ); + lock = persister.getCache().lock(ck, version); + } + else { + ck = null; + } + + if ( !isCascadeDeleteEnabled && !veto ) { + persister.delete( id, version, instance, session ); + } + + //postDelete: + // After actually deleting a row, record the fact that the instance no longer + // exists on the database (needed for identity-column key generation), and + // remove it from the session cache + final PersistenceContext persistenceContext = session.getPersistenceContext(); + EntityEntry entry = persistenceContext.removeEntry( instance ); + if ( entry == null ) { + throw new AssertionFailure( "possible nonthreadsafe access to session" ); + } + entry.postDelete(); + + EntityKey key = new EntityKey( entry.getId(), entry.getPersister(), session.getEntityMode() ); + persistenceContext.removeEntity(key); + persistenceContext.removeProxy(key); + + if ( persister.hasCache() ) persister.getCache().evict(ck); + + postDelete(); + + if ( getSession().getFactory().getStatistics().isStatisticsEnabled() && !veto ) { + getSession().getFactory().getStatisticsImplementor() + .deleteEntity( getPersister().getEntityName() ); + } + } + + private boolean preDelete() { + PreDeleteEventListener[] preListeners = getSession().getListeners() + .getPreDeleteEventListeners(); + boolean veto = false; + if (preListeners.length>0) { + PreDeleteEvent preEvent = new PreDeleteEvent( getInstance(), getId(), state, getPersister() ); + for ( int i = 0; i < preListeners.length; i++ ) { + veto = preListeners[i].onPreDelete(preEvent) || veto; + } + } + return veto; + } + + private void postDelete() { + PostDeleteEventListener[] postListeners = getSession().getListeners() + .getPostDeleteEventListeners(); + if (postListeners.length>0) { + PostDeleteEvent postEvent = new PostDeleteEvent( + getInstance(), + getId(), + state, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostDelete(postEvent); + } + } + } + + private void postCommitDelete() { + PostDeleteEventListener[] postListeners = getSession().getListeners() + .getPostCommitDeleteEventListeners(); + if (postListeners.length>0) { + PostDeleteEvent postEvent = new PostDeleteEvent( + getInstance(), + getId(), + state, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostDelete(postEvent); + } + } + } + + public void afterTransactionCompletion(boolean success) throws HibernateException { + if ( getPersister().hasCache() ) { + final CacheKey ck = new CacheKey( + getId(), + getPersister().getIdentifierType(), + getPersister().getRootEntityName(), + getSession().getEntityMode(), + getSession().getFactory() + ); + getPersister().getCache().release(ck, lock); + } + postCommitDelete(); + } + + protected boolean hasPostCommitEventListeners() { + return getSession().getListeners().getPostCommitDeleteEventListeners().length>0; + } + +} + + + + + + + diff --git a/src/org/hibernate/action/EntityIdentityInsertAction.java b/src/org/hibernate/action/EntityIdentityInsertAction.java new file mode 100644 index 0000000000..59bf5cb054 --- /dev/null +++ b/src/org/hibernate/action/EntityIdentityInsertAction.java @@ -0,0 +1,159 @@ +//$Id$ 
+package org.hibernate.action; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.AssertionFailure; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.EntityKey; +import org.hibernate.event.PostInsertEvent; +import org.hibernate.event.PostInsertEventListener; +import org.hibernate.event.PreInsertEvent; +import org.hibernate.event.PreInsertEventListener; +import org.hibernate.event.EventSource; +import org.hibernate.persister.entity.EntityPersister; + +public final class EntityIdentityInsertAction extends EntityAction { + private final Object[] state; + private final boolean isDelayed; + private final EntityKey delayedEntityKey; + //private CacheEntry cacheEntry; + private Serializable generatedId; + + public EntityIdentityInsertAction( + Object[] state, + Object instance, + EntityPersister persister, + SessionImplementor session, + boolean isDelayed) throws HibernateException { + super( session, null, instance, persister ); + this.state = state; + this.isDelayed = isDelayed; + delayedEntityKey = isDelayed ? generateDelayedEntityKey() : null; + } + + public void execute() throws HibernateException { + + final EntityPersister persister = getPersister(); + final SessionImplementor session = getSession(); + final Object instance = getInstance(); + + boolean veto = preInsert(); + + // Don't need to lock the cache here, since if someone + // else inserted the same pk first, the insert would fail + + if ( !veto ) { + generatedId = persister.insert( state, instance, session ); + if ( persister.hasInsertGeneratedProperties() ) { + persister.processInsertGeneratedProperties( generatedId, instance, state, session ); + } + //need to do that here rather than in the save event listener to let + //the post insert events to have a id-filled entity when IDENTITY is used (EJB3) + persister.setIdentifier( instance, generatedId, session.getEntityMode() ); + } + + + //TODO: this bit actually has to be called after all cascades! 
+ // but since identity insert is called *synchronously*, + // instead of asynchronously as other actions, it isn't + /*if ( persister.hasCache() && !persister.isCacheInvalidationRequired() ) { + cacheEntry = new CacheEntry(object, persister, session); + persister.getCache().insert(generatedId, cacheEntry); + }*/ + + postInsert(); + + if ( session.getFactory().getStatistics().isStatisticsEnabled() && !veto ) { + session.getFactory().getStatisticsImplementor() + .insertEntity( getPersister().getEntityName() ); + } + + } + + private void postInsert() { + if ( isDelayed ) { + getSession().getPersistenceContext().replaceDelayedEntityIdentityInsertKeys( delayedEntityKey, generatedId ); + } + PostInsertEventListener[] postListeners = getSession().getListeners() + .getPostInsertEventListeners(); + if (postListeners.length>0) { + PostInsertEvent postEvent = new PostInsertEvent( + getInstance(), + generatedId, + state, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostInsert(postEvent); + } + } + } + + private void postCommitInsert() { + PostInsertEventListener[] postListeners = getSession().getListeners() + .getPostCommitInsertEventListeners(); + if (postListeners.length>0) { + PostInsertEvent postEvent = new PostInsertEvent( + getInstance(), + generatedId, + state, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostInsert(postEvent); + } + } + } + + private boolean preInsert() { + PreInsertEventListener[] preListeners = getSession().getListeners() + .getPreInsertEventListeners(); + boolean veto = false; + if (preListeners.length>0) { + PreInsertEvent preEvent = new PreInsertEvent( getInstance(), null, state, getPersister(), getSession() ); + for ( int i = 0; i < preListeners.length; i++ ) { + veto = preListeners[i].onPreInsert(preEvent) || veto; + } + } + return veto; + } + + //Make 100% certain that this is called before any subsequent ScheduledUpdate.afterTransactionCompletion()!! 
+ public void afterTransactionCompletion(boolean success) throws HibernateException { + //TODO: reenable if we also fix the above todo + /*EntityPersister persister = getEntityPersister(); + if ( success && persister.hasCache() && !persister.isCacheInvalidationRequired() ) { + persister.getCache().afterInsert( getGeneratedId(), cacheEntry ); + }*/ + postCommitInsert(); + } + + public boolean hasAfterTransactionCompletion() { + //TODO: simply remove this override + // if we fix the above todos + return hasPostCommitEventListeners(); + } + + protected boolean hasPostCommitEventListeners() { + return getSession().getListeners().getPostCommitInsertEventListeners().length>0; + } + + public final Serializable getGeneratedId() { + return generatedId; + } + + public EntityKey getDelayedEntityKey() { + return delayedEntityKey; + } + + private synchronized EntityKey generateDelayedEntityKey() { + if ( !isDelayed ) { + throw new AssertionFailure( "cannot request delayed entity-key for non-delayed post-insert-id generation" ); + } + return new EntityKey( new DelayedPostInsertIdentifier(), getPersister(), getSession().getEntityMode() ); + } +} diff --git a/src/org/hibernate/action/EntityInsertAction.java b/src/org/hibernate/action/EntityInsertAction.java new file mode 100644 index 0000000000..3c1acdc7d9 --- /dev/null +++ b/src/org/hibernate/action/EntityInsertAction.java @@ -0,0 +1,200 @@ +//$Id$ +package org.hibernate.action; + +import java.io.Serializable; + +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.cache.CacheKey; +import org.hibernate.cache.entry.CacheEntry; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.Versioning; +import org.hibernate.event.PostInsertEvent; +import org.hibernate.event.PostInsertEventListener; +import org.hibernate.event.PreInsertEvent; +import org.hibernate.event.PreInsertEventListener; +import org.hibernate.event.EventSource; +import org.hibernate.persister.entity.EntityPersister; + +public final class EntityInsertAction extends EntityAction { + + private Object[] state; + private Object version; + private Object cacheEntry; + + public EntityInsertAction( + Serializable id, + Object[] state, + Object instance, + Object version, + EntityPersister persister, + SessionImplementor session) throws HibernateException { + super( session, id, instance, persister ); + this.state = state; + this.version = version; + } + + public Object[] getState() { + return state; + } + + public void execute() throws HibernateException { + EntityPersister persister = getPersister(); + SessionImplementor session = getSession(); + Object instance = getInstance(); + Serializable id = getId(); + + boolean veto = preInsert(); + + // Don't need to lock the cache here, since if someone + // else inserted the same pk first, the insert would fail + + if ( !veto ) { + + persister.insert( id, state, instance, session ); + + EntityEntry entry = session.getPersistenceContext().getEntry( instance ); + if ( entry == null ) { + throw new AssertionFailure( "possible nonthreadsafe access to session" ); + } + + entry.postInsert(); + + if ( persister.hasInsertGeneratedProperties() ) { + persister.processInsertGeneratedProperties( id, instance, state, session ); + if ( persister.isVersionPropertyGenerated() ) { + version = Versioning.getVersion(state, persister); + } + entry.postUpdate(instance, state, version); + } + + } + + final 
SessionFactoryImplementor factory = getSession().getFactory(); + + if ( isCachePutEnabled( persister, session ) ) { + + CacheEntry ce = new CacheEntry( + state, + persister, + persister.hasUninitializedLazyProperties( instance, session.getEntityMode() ), + version, + session, + instance + ); + + cacheEntry = persister.getCacheEntryStructure().structure(ce); + final CacheKey ck = new CacheKey( + id, + persister.getIdentifierType(), + persister.getRootEntityName(), + session.getEntityMode(), + session.getFactory() + ); +// boolean put = persister.getCache().insert(ck, cacheEntry); + boolean put = persister.getCache().insert( ck, cacheEntry, version ); + + if ( put && factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor() + .secondLevelCachePut( getPersister().getCache().getRegionName() ); + } + + } + + postInsert(); + + if ( factory.getStatistics().isStatisticsEnabled() && !veto ) { + factory.getStatisticsImplementor() + .insertEntity( getPersister().getEntityName() ); + } + + } + + private void postInsert() { + PostInsertEventListener[] postListeners = getSession().getListeners() + .getPostInsertEventListeners(); + if ( postListeners.length > 0 ) { + PostInsertEvent postEvent = new PostInsertEvent( + getInstance(), + getId(), + state, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostInsert(postEvent); + } + } + } + + private void postCommitInsert() { + PostInsertEventListener[] postListeners = getSession().getListeners() + .getPostCommitInsertEventListeners(); + if ( postListeners.length > 0 ) { + PostInsertEvent postEvent = new PostInsertEvent( + getInstance(), + getId(), + state, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostInsert(postEvent); + } + } + } + + private boolean preInsert() { + PreInsertEventListener[] preListeners = getSession().getListeners() + .getPreInsertEventListeners(); + boolean veto = false; + if (preListeners.length>0) { + PreInsertEvent preEvent = new PreInsertEvent( getInstance(), getId(), state, getPersister(), getSession() ); + for ( int i = 0; i < preListeners.length; i++ ) { + veto = preListeners[i].onPreInsert(preEvent) || veto; + } + } + return veto; + } + + //Make 100% certain that this is called before any subsequent ScheduledUpdate.afterTransactionCompletion()!! 
+ public void afterTransactionCompletion(boolean success) throws HibernateException { + EntityPersister persister = getPersister(); + if ( success && isCachePutEnabled( persister, getSession() ) ) { + final CacheKey ck = new CacheKey( + getId(), + persister.getIdentifierType(), + persister.getRootEntityName(), + getSession().getEntityMode(), + getSession().getFactory() + ); + boolean put = persister.getCache().afterInsert(ck, cacheEntry, version ); + + if ( put && getSession().getFactory().getStatistics().isStatisticsEnabled() ) { + getSession().getFactory().getStatisticsImplementor() + .secondLevelCachePut( getPersister().getCache().getRegionName() ); + } + } + postCommitInsert(); + } + + protected boolean hasPostCommitEventListeners() { + return getSession().getListeners().getPostCommitInsertEventListeners().length>0; + } + + private boolean isCachePutEnabled(EntityPersister persister, SessionImplementor session) { + return persister.hasCache() && + !persister.isCacheInvalidationRequired() && + session.getCacheMode().isPutEnabled(); + } + +} + + + + + + + diff --git a/src/org/hibernate/action/EntityUpdateAction.java b/src/org/hibernate/action/EntityUpdateAction.java new file mode 100644 index 0000000000..28c8e92b29 --- /dev/null +++ b/src/org/hibernate/action/EntityUpdateAction.java @@ -0,0 +1,261 @@ +//$Id$ +package org.hibernate.action; + +import java.io.Serializable; + +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.cache.CacheException; +import org.hibernate.cache.CacheKey; +import org.hibernate.cache.CacheConcurrencyStrategy.SoftLock; +import org.hibernate.cache.entry.CacheEntry; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.Status; +import org.hibernate.engine.Versioning; +import org.hibernate.event.PostUpdateEvent; +import org.hibernate.event.PostUpdateEventListener; +import org.hibernate.event.PreUpdateEvent; +import org.hibernate.event.PreUpdateEventListener; +import org.hibernate.event.EventSource; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.TypeFactory; + +public final class EntityUpdateAction extends EntityAction { + + private final Object[] state; + private final Object[] previousState; + private final Object previousVersion; + private Object nextVersion; + private final int[] dirtyFields; + private final boolean hasDirtyCollection; + private final Object rowId; + private Object cacheEntry; + private SoftLock lock; + + public EntityUpdateAction( + final Serializable id, + final Object[] state, + final int[] dirtyProperties, + final boolean hasDirtyCollection, + final Object[] previousState, + final Object previousVersion, + final Object nextVersion, + final Object instance, + final Object rowId, + final EntityPersister persister, + final SessionImplementor session) throws HibernateException { + super( session, id, instance, persister ); + this.state = state; + this.previousState = previousState; + this.previousVersion = previousVersion; + this.nextVersion = nextVersion; + this.dirtyFields = dirtyProperties; + this.hasDirtyCollection = hasDirtyCollection; + this.rowId = rowId; + } + + public void execute() throws HibernateException { + Serializable id = getId(); + EntityPersister persister = getPersister(); + SessionImplementor session = getSession(); + Object instance = getInstance(); + + boolean veto = preUpdate(); + + final SessionFactoryImplementor 
factory = getSession().getFactory();
+		Object previousVersion = this.previousVersion;
+		if ( persister.isVersionPropertyGenerated() ) {
+			// we need to grab the version value from the entity, otherwise
+			// we have issues with generated-version entities that may have
+			// multiple actions queued during the same flush
+			previousVersion = persister.getVersion( instance, session.getEntityMode() );
+		}
+
+		final CacheKey ck;
+		if ( persister.hasCache() ) {
+			ck = new CacheKey(
+					id,
+					persister.getIdentifierType(),
+					persister.getRootEntityName(),
+					session.getEntityMode(),
+					session.getFactory()
+			);
+			lock = persister.getCache().lock(ck, previousVersion);
+		}
+		else {
+			ck = null;
+		}
+
+		if ( !veto ) {
+			persister.update(
+					id,
+					state,
+					dirtyFields,
+					hasDirtyCollection,
+					previousState,
+					previousVersion,
+					instance,
+					rowId,
+					session
+			);
+		}
+
+		EntityEntry entry = getSession().getPersistenceContext().getEntry( instance );
+		if ( entry == null ) {
+			throw new AssertionFailure( "possible nonthreadsafe access to session" );
+		}
+
+		if ( entry.getStatus()==Status.MANAGED || persister.isVersionPropertyGenerated() ) {
+			// get the updated snapshot of the entity state by cloning current state;
+			// it is safe to copy in place, since by this time no one else should have
+			// a reference to the array
+			TypeFactory.deepCopy(
+					state,
+					persister.getPropertyTypes(),
+					persister.getPropertyCheckability(),
+					state,
+					session
+			);
+			if ( persister.hasUpdateGeneratedProperties() ) {
+				// this entity defines property generation, so process those generated
+				// values...
+				persister.processUpdateGeneratedProperties( id, instance, state, session );
+				if ( persister.isVersionPropertyGenerated() ) {
+					nextVersion = Versioning.getVersion( state, persister );
+				}
+			}
+			// have the entity entry perform post-update processing, passing it the
+			// updated state and the new version (if any).
+			entry.postUpdate( instance, state, nextVersion );
+		}
+
+		if ( persister.hasCache() ) {
+			if ( persister.isCacheInvalidationRequired() || entry.getStatus()!=Status.MANAGED ) {
+				persister.getCache().evict(ck);
+			}
+			else {
+				//TODO: inefficient if the cache is just going to ignore the updated state!
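+				// rebuild a cache entry from the (possibly regenerated) state and push it to
+				// the second-level cache so that other sessions see the updated version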
+ CacheEntry ce = new CacheEntry( + state, + persister, + persister.hasUninitializedLazyProperties( instance, session.getEntityMode() ), + nextVersion, + getSession(), + instance + ); + cacheEntry = persister.getCacheEntryStructure().structure(ce); +// boolean put = persister.getCache().update(ck, cacheEntry); + boolean put = persister.getCache().update( ck, cacheEntry, nextVersion, previousVersion ); + + if ( put && factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor() + .secondLevelCachePut( getPersister().getCache().getRegionName() ); + } + } + } + + postUpdate(); + + if ( factory.getStatistics().isStatisticsEnabled() && !veto ) { + factory.getStatisticsImplementor() + .updateEntity( getPersister().getEntityName() ); + } + } + + private void postUpdate() { + PostUpdateEventListener[] postListeners = getSession().getListeners() + .getPostUpdateEventListeners(); + if (postListeners.length>0) { + PostUpdateEvent postEvent = new PostUpdateEvent( + getInstance(), + getId(), + state, + previousState, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostUpdate(postEvent); + } + } + } + + private void postCommitUpdate() { + PostUpdateEventListener[] postListeners = getSession().getListeners() + .getPostCommitUpdateEventListeners(); + if (postListeners.length>0) { + PostUpdateEvent postEvent = new PostUpdateEvent( + getInstance(), + getId(), + state, + previousState, + getPersister(), + (EventSource) getSession() + ); + for ( int i = 0; i < postListeners.length; i++ ) { + postListeners[i].onPostUpdate(postEvent); + } + } + } + + private boolean preUpdate() { + PreUpdateEventListener[] preListeners = getSession().getListeners() + .getPreUpdateEventListeners(); + boolean veto = false; + if (preListeners.length>0) { + PreUpdateEvent preEvent = new PreUpdateEvent( + getInstance(), + getId(), + state, + previousState, + getPersister(), + getSession() + ); + for ( int i = 0; i < preListeners.length; i++ ) { + veto = preListeners[i].onPreUpdate(preEvent) || veto; + } + } + return veto; + } + + public void afterTransactionCompletion(boolean success) throws CacheException { + EntityPersister persister = getPersister(); + if ( persister.hasCache() ) { + + final CacheKey ck = new CacheKey( + getId(), + persister.getIdentifierType(), + persister.getRootEntityName(), + getSession().getEntityMode(), + getSession().getFactory() + ); + + if ( success && cacheEntry!=null /*!persister.isCacheInvalidationRequired()*/ ) { + boolean put = persister.getCache().afterUpdate(ck, cacheEntry, nextVersion, lock ); + + if ( put && getSession().getFactory().getStatistics().isStatisticsEnabled() ) { + getSession().getFactory().getStatisticsImplementor() + .secondLevelCachePut( getPersister().getCache().getRegionName() ); + } + } + else { + persister.getCache().release(ck, lock ); + } + } + postCommitUpdate(); + } + + protected boolean hasPostCommitEventListeners() { + return getSession().getListeners().getPostCommitUpdateEventListeners().length>0; + } + +} + + + + + + + diff --git a/src/org/hibernate/action/Executable.java b/src/org/hibernate/action/Executable.java new file mode 100644 index 0000000000..4860fa9f1e --- /dev/null +++ b/src/org/hibernate/action/Executable.java @@ -0,0 +1,39 @@ +//$Id$ +package org.hibernate.action; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * An operation which may be scheduled for later execution. 
+ * Usually, the operation is a database insert/update/delete, + * together with required second-level cache management. + * + * @author Gavin King + */ +public interface Executable { + /** + * Called before executing any actions + */ + public void beforeExecutions() throws HibernateException; + /** + * Execute this action + */ + public void execute() throws HibernateException; + /** + * Do we need to retain this instance until after the + * transaction completes? + * @return false if this class defines a no-op + * hasAfterTransactionCompletion() + */ + public boolean hasAfterTransactionCompletion(); + /** + * Called after the transaction completes + */ + public void afterTransactionCompletion(boolean success) throws HibernateException; + /** + * What spaces (tables) are affected by this action? + */ + public Serializable[] getPropertySpaces(); +} diff --git a/src/org/hibernate/action/package.html b/src/org/hibernate/action/package.html new file mode 100755 index 0000000000..c9dd1b09b3 --- /dev/null +++ b/src/org/hibernate/action/package.html @@ -0,0 +1,10 @@ + + + + +
+<p>
+	This package defines "actions" that are scheduled for
+	asynchronous execution by the event listeners.
+</p>
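+<p>
+	For example, a pre-insert event listener can observe (and veto) the inserts
+	performed by the actions in this package. The following sketch is purely
+	illustrative and not part of this package:
+</p>
+<pre>
+public class LoggingPreInsertListener implements org.hibernate.event.PreInsertEventListener {
+	public boolean onPreInsert(org.hibernate.event.PreInsertEvent event) {
+		System.out.println( "scheduling insert for: " + event.getEntity() );
+		return false; // returning true would veto the scheduled insert
+	}
+}
+</pre>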
    + + diff --git a/src/org/hibernate/bytecode/AbstractClassTransformerImpl.java b/src/org/hibernate/bytecode/AbstractClassTransformerImpl.java new file mode 100644 index 0000000000..ad457485a2 --- /dev/null +++ b/src/org/hibernate/bytecode/AbstractClassTransformerImpl.java @@ -0,0 +1,45 @@ +//$Id: $ +package org.hibernate.bytecode; + +import org.hibernate.bytecode.util.ClassFilter; +import org.hibernate.bytecode.util.FieldFilter; + +import java.security.ProtectionDomain; + +/** + * @author Emmanuel Bernard + * @author Steve Ebersole + */ +public abstract class AbstractClassTransformerImpl implements ClassTransformer { + + protected final ClassFilter classFilter; + protected final FieldFilter fieldFilter; + + protected AbstractClassTransformerImpl(ClassFilter classFilter, FieldFilter fieldFilter) { + this.classFilter = classFilter; + this.fieldFilter = fieldFilter; + } + + public byte[] transform( + ClassLoader loader, + String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, + byte[] classfileBuffer) { + // to be safe... + className = className.replace( '/', '.' ); + if ( classFilter.shouldInstrumentClass( className ) ) { + return doTransform( loader, className, classBeingRedefined, protectionDomain, classfileBuffer ); + } + else { + return classfileBuffer; + } + } + + protected abstract byte[] doTransform( + ClassLoader loader, + String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, + byte[] classfileBuffer); +} diff --git a/src/org/hibernate/bytecode/BasicProxyFactory.java b/src/org/hibernate/bytecode/BasicProxyFactory.java new file mode 100644 index 0000000000..0bb5e582a9 --- /dev/null +++ b/src/org/hibernate/bytecode/BasicProxyFactory.java @@ -0,0 +1,10 @@ +package org.hibernate.bytecode; + +/** + * A proxy factory for "basic proxy" generation + * + * @author Steve Ebersole + */ +public interface BasicProxyFactory { + public Object getProxy(); +} diff --git a/src/org/hibernate/bytecode/BytecodeProvider.java b/src/org/hibernate/bytecode/BytecodeProvider.java new file mode 100644 index 0000000000..0f780d937a --- /dev/null +++ b/src/org/hibernate/bytecode/BytecodeProvider.java @@ -0,0 +1,49 @@ +package org.hibernate.bytecode; + +import org.hibernate.bytecode.util.ClassFilter; +import org.hibernate.bytecode.util.FieldFilter; + +/** + * Contract for providers of bytecode services to Hibernate. + *
+ * Bytecode requirements break down into basically 3 areas:
+ * <ol>
+ * <li>proxy generation (both for runtime-lazy-loading and basic proxy generation)
+ * {@link #getProxyFactoryFactory()}</li>
+ * <li>bean reflection optimization {@link #getReflectionOptimizer}</li>
+ * <li>field-access instrumentation {@link #getTransformer}</li>
+ * </ol>
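+ * <p>
+ * Illustrative lookup (assuming the provider is resolved via
+ * <tt>Environment.getBytecodeProvider()</tt> and suitable filters are at hand):
+ * <pre>
+ * BytecodeProvider provider = Environment.getBytecodeProvider();
+ * ProxyFactoryFactory proxies = provider.getProxyFactoryFactory();
+ * ClassTransformer transformer = provider.getTransformer( classFilter, fieldFilter );
+ * </pre>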
    + * + * @author Steve Ebersole + */ +public interface BytecodeProvider { + /** + * Retrieve the specific factory for this provider capable of + * generating run-time proxies for lazy-loading purposes. + * + * @return The provider specifc factory. + */ + public ProxyFactoryFactory getProxyFactoryFactory(); + + /** + * Retrieve the ReflectionOptimizer delegate for this provider + * capable of generating reflection optimization components. + * + * @param clazz The class to be reflected upon. + * @param getterNames Names of all property getters to be accessed via reflection. + * @param setterNames Names of all property setters to be accessed via reflection. + * @param types The types of all properties to be accessed. + * @return The reflection optimization delegate. + */ + public ReflectionOptimizer getReflectionOptimizer(Class clazz, String[] getterNames, String[] setterNames, Class[] types); + + /** + * Generate a ClassTransformer capable of performing bytecode manipulation. + * + * @param classFilter filter used to limit which classes are to be instrumented + * via this ClassTransformer. + * @param fieldFilter filter used to limit which fields are to be instrumented + * via this ClassTransformer. + * @return The appropriate ClassTransformer. + */ + public ClassTransformer getTransformer(ClassFilter classFilter, FieldFilter fieldFilter); +} diff --git a/src/org/hibernate/bytecode/ClassTransformer.java b/src/org/hibernate/bytecode/ClassTransformer.java new file mode 100644 index 0000000000..2f380cec4a --- /dev/null +++ b/src/org/hibernate/bytecode/ClassTransformer.java @@ -0,0 +1,34 @@ +//$Id: $ +package org.hibernate.bytecode; + +import java.security.ProtectionDomain; + +/** + * A persistence provider provides an instance of this interface + * to the PersistenceUnitInfo.addTransformer method. + * The supplied transformer instance will get called to transform + * entity class files when they are loaded and redefined. The transformation + * occurs before the class is defined by the JVM + * + * + * @author Bill Burke + * @author Emmanuel Bernard + */ +public interface ClassTransformer +{ + /** + * Invoked when a class is being loaded or redefined to add hooks for persistence bytecode manipulation + * + * @param loader the defining class loaderof the class being transformed. It may be null if using bootstrap loader + * @param classname The name of the class being transformed + * @param classBeingRedefined If an already loaded class is being redefined, then pass this as a parameter + * @param protectionDomain ProtectionDomain of the class being (re)-defined + * @param classfileBuffer The input byte buffer in class file format + * @return A well-formed class file that can be loaded + */ + public byte[] transform(ClassLoader loader, + String classname, + Class classBeingRedefined, + ProtectionDomain protectionDomain, + byte[] classfileBuffer); +} diff --git a/src/org/hibernate/bytecode/InstrumentedClassLoader.java b/src/org/hibernate/bytecode/InstrumentedClassLoader.java new file mode 100644 index 0000000000..dc5aa70a2c --- /dev/null +++ b/src/org/hibernate/bytecode/InstrumentedClassLoader.java @@ -0,0 +1,54 @@ +package org.hibernate.bytecode; + +import org.hibernate.bytecode.util.ByteCodeHelper; + +import java.io.InputStream; + +/** + * A specialized classloader which performs bytecode enhancement on class + * definitions as they are loaded into the classloader scope. 
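+ * <p>
+ * A sketch of intended usage (illustrative; the transformer would come from a
+ * <tt>BytecodeProvider</tt>, and the entity name is hypothetical):
+ * <pre>
+ * InstrumentedClassLoader loader = new InstrumentedClassLoader(
+ *         Thread.currentThread().getContextClassLoader(),
+ *         transformer
+ * );
+ * Class entityClass = loader.loadClass( "com.example.MyEntity" );
+ * </pre>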
+ * + * @author Emmanuel Bernard + * @author Steve Ebersole + */ +public class InstrumentedClassLoader extends ClassLoader { + + private ClassTransformer classTransformer; + + public InstrumentedClassLoader(ClassLoader parent, ClassTransformer classTransformer) { + super( parent ); + this.classTransformer = classTransformer; + } + + public Class loadClass(String name) throws ClassNotFoundException { + if ( name.startsWith( "java." ) || classTransformer == null ) { + return getParent().loadClass( name ); + } + + Class c = findLoadedClass( name ); + if ( c != null ) { + return c; + } + + InputStream is = this.getResourceAsStream( name.replace( '.', '/' ) + ".class" ); + if ( is == null ) { + throw new ClassNotFoundException( name + " not found" ); + } + + try { + byte[] originalBytecode = ByteCodeHelper.readByteCode( is ); + byte[] transformedBytecode = classTransformer.transform( getParent(), name, null, null, originalBytecode ); + if ( originalBytecode == transformedBytecode ) { + // no transformations took place, so handle it as we would a + // non-instrumented class + return getParent().loadClass( name ); + } + else { + return defineClass( name, transformedBytecode, 0, transformedBytecode.length ); + } + } + catch( Throwable t ) { + throw new ClassNotFoundException( name + " not found", t ); + } + } +} diff --git a/src/org/hibernate/bytecode/ProxyFactoryFactory.java b/src/org/hibernate/bytecode/ProxyFactoryFactory.java new file mode 100644 index 0000000000..c44dc926c7 --- /dev/null +++ b/src/org/hibernate/bytecode/ProxyFactoryFactory.java @@ -0,0 +1,37 @@ +package org.hibernate.bytecode; + +import org.hibernate.proxy.ProxyFactory; + +/** + * An interface for factories of {@link ProxyFactory proxy factory} instances. + *
+ * Currently used to abstract from the tuplizer whether we are using CGLIB or
+ * Javassist for lazy proxy generation.
+ *
+ * @author Steve Ebersole
+ */
+public interface ProxyFactoryFactory {
+	/**
+	 * Build a proxy factory specifically for handling runtime
+	 * lazy loading.
+	 *
+	 * @return The lazy-load proxy factory.
+	 */
+	public ProxyFactory buildProxyFactory();
+
+	/**
+	 * Build a proxy factory for basic proxy concerns.  The return
+	 * should be capable of properly handling newInstance() calls.
+	 * <p/>
    + * Should build basic proxies essentially equivalent to JDK proxies in + * terms of capabilities, but should be able to deal with abstract super + * classes in addition to proxy interfaces. + *
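+ * <pre>
+ * // illustrative: proxy a hypothetical interface using a factory obtained
+ * // from BytecodeProvider#getProxyFactoryFactory()
+ * BasicProxyFactory factory = proxyFactoryFactory.buildBasicProxyFactory(
+ *         null, new Class[] { Named.class } );
+ * Object proxy = factory.getProxy();
+ * </pre>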
    + * Must pass in either superClass or interfaces (or both). + * + * @param superClass The abstract super class (or null if none). + * @param interfaces Interfaces to be proxied (or null if none). + * @return The proxy class + */ + public BasicProxyFactory buildBasicProxyFactory(Class superClass, Class[] interfaces); +} diff --git a/src/org/hibernate/bytecode/ReflectionOptimizer.java b/src/org/hibernate/bytecode/ReflectionOptimizer.java new file mode 100644 index 0000000000..83d6b60153 --- /dev/null +++ b/src/org/hibernate/bytecode/ReflectionOptimizer.java @@ -0,0 +1,35 @@ +package org.hibernate.bytecode; + +/** + * Represents reflection optimization for a particular class. + * + * @author Steve Ebersole + */ +public interface ReflectionOptimizer { + + public InstantiationOptimizer getInstantiationOptimizer(); + public AccessOptimizer getAccessOptimizer(); + + /** + * Represents optimized entity instantiation. + */ + public static interface InstantiationOptimizer { + /** + * Perform instantiation of an instance of the underlying class. + * + * @return The new instance. + */ + public Object newInstance(); + } + + /** + * Represents optimized entity property access. + * + * @author Steve Ebersole + */ + public interface AccessOptimizer { + public String[] getPropertyNames(); + public Object[] getPropertyValues(Object object); + public void setPropertyValues(Object object, Object[] values); + } +} diff --git a/src/org/hibernate/bytecode/cglib/AccessOptimizerAdapter.java b/src/org/hibernate/bytecode/cglib/AccessOptimizerAdapter.java new file mode 100644 index 0000000000..092e4a414c --- /dev/null +++ b/src/org/hibernate/bytecode/cglib/AccessOptimizerAdapter.java @@ -0,0 +1,102 @@ +package org.hibernate.bytecode.cglib; + +import org.hibernate.bytecode.ReflectionOptimizer; +import org.hibernate.PropertyAccessException; +import net.sf.cglib.beans.BulkBean; +import net.sf.cglib.beans.BulkBeanException; +import net.sf.cglib.reflect.FastClass; + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; + +/** + * The {@link ReflectionOptimizer.AccessOptimizer} implementation for CGLIB + * which simply acts as an adpater to the {@link BulkBean} class. 
+ * + * @author Steve Ebersole + */ +public class AccessOptimizerAdapter implements ReflectionOptimizer.AccessOptimizer, Serializable { + + public static final String PROPERTY_GET_EXCEPTION = + "exception getting property value with CGLIB (set hibernate.bytecode.use_reflection_optimizer=false for more info)"; + + public static final String PROPERTY_SET_EXCEPTION = + "exception setting property value with CGLIB (set hibernate.bytecode.use_reflection_optimizer=false for more info)"; + + private Class mappedClass; + private BulkBean bulkBean; + + public AccessOptimizerAdapter(BulkBean bulkBean, Class mappedClass) { + this.bulkBean = bulkBean; + this.mappedClass = mappedClass; + } + + public String[] getPropertyNames() { + return bulkBean.getGetters(); + } + + public Object[] getPropertyValues(Object object) { + try { + return bulkBean.getPropertyValues( object ); + } + catch ( Throwable t ) { + throw new PropertyAccessException( + t, + PROPERTY_GET_EXCEPTION, + false, + mappedClass, + getterName( t, bulkBean ) + ); + } + } + + public void setPropertyValues(Object object, Object[] values) { + try { + bulkBean.setPropertyValues( object, values ); + } + catch ( Throwable t ) { + throw new PropertyAccessException( + t, + PROPERTY_SET_EXCEPTION, + true, + mappedClass, + setterName( t, bulkBean ) + ); + } + } + + private static String setterName(Throwable t, BulkBean optimizer) { + if ( t instanceof BulkBeanException ) { + return optimizer.getSetters()[( ( BulkBeanException ) t ).getIndex()]; + } + else { + return "?"; + } + } + + private static String getterName(Throwable t, BulkBean optimizer) { + if ( t instanceof BulkBeanException ) { + return optimizer.getGetters()[( ( BulkBeanException ) t ).getIndex()]; + } + else { + return "?"; + } + } + + private void writeObject(ObjectOutputStream out) throws IOException { + out.writeObject( mappedClass ); + out.writeObject( bulkBean.getGetters() ); + out.writeObject( bulkBean.getSetters() ); + out.writeObject( bulkBean.getPropertyTypes() ); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + Class beanClass = ( Class ) in.readObject(); + String[] getters = ( String[] ) in.readObject(); + String[] setters = ( String[] ) in.readObject(); + Class[] types = ( Class[] ) in.readObject(); + bulkBean = BulkBean.create( beanClass, getters, setters, types ); + } +} diff --git a/src/org/hibernate/bytecode/cglib/BytecodeProviderImpl.java b/src/org/hibernate/bytecode/cglib/BytecodeProviderImpl.java new file mode 100644 index 0000000000..aa21d75457 --- /dev/null +++ b/src/org/hibernate/bytecode/cglib/BytecodeProviderImpl.java @@ -0,0 +1,92 @@ +package org.hibernate.bytecode.cglib; + +import java.lang.reflect.Modifier; + +import net.sf.cglib.beans.BulkBean; +import net.sf.cglib.beans.BulkBeanException; +import net.sf.cglib.reflect.FastClass; +import net.sf.cglib.transform.ClassFilter; +import net.sf.cglib.transform.ClassTransformer; +import net.sf.cglib.transform.ClassTransformerFactory; +import net.sf.cglib.transform.TransformingClassLoader; +import net.sf.cglib.transform.impl.InterceptFieldFilter; +import net.sf.cglib.transform.impl.InterceptFieldTransformer; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.bytecode.BytecodeProvider; +import org.hibernate.bytecode.ProxyFactoryFactory; +import org.hibernate.bytecode.ReflectionOptimizer; +import org.hibernate.bytecode.util.FieldFilter; +import org.hibernate.util.StringHelper; +import org.objectweb.asm.Type; + 
+/** + * Bytecode provider implementation for CGLIB. + * + * @author Steve Ebersole + */ +public class BytecodeProviderImpl implements BytecodeProvider { + + private static final Log log = LogFactory.getLog( BytecodeProviderImpl.class ); + + public ProxyFactoryFactory getProxyFactoryFactory() { + return new ProxyFactoryFactoryImpl(); + } + + public ReflectionOptimizer getReflectionOptimizer( + Class clazz, + String[] getterNames, + String[] setterNames, + Class[] types) { + FastClass fastClass; + BulkBean bulkBean; + try { + fastClass = FastClass.create( clazz ); + bulkBean = BulkBean.create( clazz, getterNames, setterNames, types ); + if ( !clazz.isInterface() && !Modifier.isAbstract( clazz.getModifiers() ) ) { + if ( fastClass == null ) { + bulkBean = null; + } + else { + //test out the optimizer: + Object instance = fastClass.newInstance(); + bulkBean.setPropertyValues( instance, bulkBean.getPropertyValues( instance ) ); + } + } + } + catch( Throwable t ) { + fastClass = null; + bulkBean = null; + String message = "reflection optimizer disabled for: " + + clazz.getName() + + " [" + + StringHelper.unqualify( t.getClass().getName() ) + + ": " + + t.getMessage(); + + if (t instanceof BulkBeanException ) { + int index = ( (BulkBeanException) t ).getIndex(); + if (index >= 0) { + message += " (property " + setterNames[index] + ")"; + } + } + + log.debug( message ); + } + + if ( fastClass != null && bulkBean != null ) { + return new ReflectionOptimizerImpl( + new InstantiationOptimizerAdapter( fastClass ), + new AccessOptimizerAdapter( bulkBean, clazz ) + ); + } + else { + return null; + } + } + + public org.hibernate.bytecode.ClassTransformer getTransformer(org.hibernate.bytecode.util.ClassFilter classFilter, FieldFilter fieldFilter) { + return new CglibClassTransformer( classFilter, fieldFilter ); + } + +} diff --git a/src/org/hibernate/bytecode/cglib/CglibClassTransformer.java b/src/org/hibernate/bytecode/cglib/CglibClassTransformer.java new file mode 100644 index 0000000000..9f8834ef54 --- /dev/null +++ b/src/org/hibernate/bytecode/cglib/CglibClassTransformer.java @@ -0,0 +1,121 @@ +//$Id: $ +package org.hibernate.bytecode.cglib; + +import java.security.ProtectionDomain; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.ByteArrayOutputStream; + +import net.sf.cglib.transform.ClassTransformer; +import net.sf.cglib.transform.TransformingClassGenerator; +import net.sf.cglib.transform.ClassReaderGenerator; +import net.sf.cglib.transform.impl.InterceptFieldEnabled; +import net.sf.cglib.transform.impl.InterceptFieldFilter; +import net.sf.cglib.transform.impl.InterceptFieldTransformer; +import net.sf.cglib.core.ClassNameReader; +import net.sf.cglib.core.DebuggingClassWriter; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.bytecode.AbstractClassTransformerImpl; +import org.hibernate.bytecode.util.FieldFilter; +import org.hibernate.bytecode.util.ClassFilter; +import org.hibernate.HibernateException; +import org.objectweb.asm.Attribute; +import org.objectweb.asm.Type; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.attrs.Attributes; + +/** + * Enhance the classes allowing them to implements InterceptFieldEnabled + * This interface is then used by Hibernate for some optimizations. 
+ * + * @author Emmanuel Bernard + * @author Steve Ebersole + */ +public class CglibClassTransformer extends AbstractClassTransformerImpl { + + private static Log log = LogFactory.getLog( CglibClassTransformer.class.getName() ); + + public CglibClassTransformer(ClassFilter classFilter, FieldFilter fieldFilter) { + super( classFilter, fieldFilter ); + } + + protected byte[] doTransform( + ClassLoader loader, + String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, + byte[] classfileBuffer) { + ClassReader reader; + try { + reader = new ClassReader( new ByteArrayInputStream( classfileBuffer ) ); + } + catch (IOException e) { + log.error( "Unable to read class", e ); + throw new HibernateException( "Unable to read class: " + e.getMessage() ); + } + + String[] names = ClassNameReader.getClassInfo( reader ); + ClassWriter w = new DebuggingClassWriter( true ); + ClassTransformer t = getClassTransformer( names ); + if ( t != null ) { + if ( log.isDebugEnabled() ) { + log.debug( "Enhancing " + className ); + } + ByteArrayOutputStream out; + byte[] result; + try { + reader = new ClassReader( new ByteArrayInputStream( classfileBuffer ) ); + new TransformingClassGenerator( + new ClassReaderGenerator( reader, attributes(), skipDebug() ), t + ).generateClass( w ); + out = new ByteArrayOutputStream(); + out.write( w.toByteArray() ); + result = out.toByteArray(); + out.close(); + } + catch (Exception e) { + log.error( "Unable to transform class", e ); + throw new HibernateException( "Unable to transform class: " + e.getMessage() ); + } + return result; + } + return classfileBuffer; + } + + + private Attribute[] attributes() { + return Attributes.getDefaultAttributes(); + } + + private boolean skipDebug() { + return false; + } + + private ClassTransformer getClassTransformer(final String[] classInfo) { + if ( isAlreadyInstrumented( classInfo ) ) { + return null; + } + return new InterceptFieldTransformer( + new InterceptFieldFilter() { + public boolean acceptRead(Type owner, String name) { + return fieldFilter.shouldTransformFieldAccess( classInfo[0], owner.getClassName(), name ); + } + + public boolean acceptWrite(Type owner, String name) { + return fieldFilter.shouldTransformFieldAccess( classInfo[0], owner.getClassName(), name ); + } + } + ); + } + + private boolean isAlreadyInstrumented(String[] classInfo) { + for ( int i = 1; i < classInfo.length; i++ ) { + if ( InterceptFieldEnabled.class.getName().equals( classInfo[i] ) ) { + return true; + } + } + return false; + } +} diff --git a/src/org/hibernate/bytecode/cglib/InstantiationOptimizerAdapter.java b/src/org/hibernate/bytecode/cglib/InstantiationOptimizerAdapter.java new file mode 100644 index 0000000000..f4f0916db8 --- /dev/null +++ b/src/org/hibernate/bytecode/cglib/InstantiationOptimizerAdapter.java @@ -0,0 +1,46 @@ +package org.hibernate.bytecode.cglib; + +import org.hibernate.bytecode.ReflectionOptimizer; +import net.sf.cglib.reflect.FastClass; +import org.hibernate.InstantiationException; + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; + +/** + * The {@link ReflectionOptimizer.InstantiationOptimizer} implementation for CGLIB + * which simply acts as an adpater to the {@link FastClass} class. 
+ * + * @author Steve Ebersole + */ +public class InstantiationOptimizerAdapter implements ReflectionOptimizer.InstantiationOptimizer, Serializable { + private FastClass fastClass; + + public InstantiationOptimizerAdapter(FastClass fastClass) { + this.fastClass = fastClass; + } + + public Object newInstance() { + try { + return fastClass.newInstance(); + } + catch ( Throwable t ) { + throw new InstantiationException( + "Could not instantiate entity with CGLIB optimizer: ", + fastClass.getJavaClass(), + t + ); + } + } + + private void writeObject(ObjectOutputStream out) throws IOException { + out.writeObject( fastClass.getJavaClass() ); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + Class beanClass = ( Class ) in.readObject(); + fastClass = FastClass.create( beanClass ); + } +} diff --git a/src/org/hibernate/bytecode/cglib/ProxyFactoryFactoryImpl.java b/src/org/hibernate/bytecode/cglib/ProxyFactoryFactoryImpl.java new file mode 100644 index 0000000000..4ff8e37ed4 --- /dev/null +++ b/src/org/hibernate/bytecode/cglib/ProxyFactoryFactoryImpl.java @@ -0,0 +1,141 @@ +package org.hibernate.bytecode.cglib; + +import org.hibernate.bytecode.ProxyFactoryFactory; +import org.hibernate.bytecode.BasicProxyFactory; +import org.hibernate.proxy.ProxyFactory; +import org.hibernate.proxy.pojo.cglib.CGLIBProxyFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import net.sf.cglib.proxy.Enhancer; +import net.sf.cglib.proxy.CallbackFilter; +import net.sf.cglib.proxy.MethodInterceptor; +import net.sf.cglib.proxy.MethodProxy; +import net.sf.cglib.proxy.NoOp; +import net.sf.cglib.proxy.Callback; +import net.sf.cglib.proxy.Factory; + +import java.lang.reflect.Method; +import java.util.HashMap; + +/** + * A factory for CGLIB-based {@link ProxyFactory} instances. + * + * @author Steve Ebersole + */ +public class ProxyFactoryFactoryImpl implements ProxyFactoryFactory { + + /** + * Builds a CGLIB-based proxy factory. + * + * @return a new CGLIB-based proxy factory. 
+ */ + public ProxyFactory buildProxyFactory() { + return new CGLIBProxyFactory(); + } + + public BasicProxyFactory buildBasicProxyFactory(Class superClass, Class[] interfaces) { + return new BasicProxyFactoryImpl( superClass, interfaces ); + } + + public static class BasicProxyFactoryImpl implements BasicProxyFactory { + private final Class proxyClass; + private final Factory factory; + + public BasicProxyFactoryImpl(Class superClass, Class[] interfaces) { + if ( superClass == null && ( interfaces == null || interfaces.length < 1 ) ) { + throw new AssertionFailure( "attempting to build proxy without any superclass or interfaces" ); + } + + Enhancer en = new Enhancer(); + en.setUseCache( false ); + en.setInterceptDuringConstruction( false ); + en.setUseFactory( true ); + en.setCallbackTypes( CALLBACK_TYPES ); + en.setCallbackFilter( FINALIZE_FILTER ); + if ( superClass != null ) { + en.setSuperclass( superClass ); + } + if ( interfaces != null && interfaces.length > 0 ) { + en.setInterfaces( interfaces ); + } + proxyClass = en.createClass(); + try { + factory = ( Factory ) proxyClass.newInstance(); + } + catch ( Throwable t ) { + throw new HibernateException( "Unable to build CGLIB Factory instance" ); + } + } + + public Object getProxy() { + try { + return factory.newInstance( + new Callback[] { new PassThroughInterceptor( proxyClass.getName() ), NoOp.INSTANCE } + ); + } + catch ( Throwable t ) { + throw new HibernateException( "Unable to instantiate proxy instance" ); + } + } + } + + private static final CallbackFilter FINALIZE_FILTER = new CallbackFilter() { + public int accept(Method method) { + if ( method.getParameterTypes().length == 0 && method.getName().equals("finalize") ){ + return 1; + } + else { + return 0; + } + } + }; + + private static final Class[] CALLBACK_TYPES = new Class[] { MethodInterceptor.class, NoOp.class }; + + private static class PassThroughInterceptor implements MethodInterceptor { + private HashMap data = new HashMap(); + private final String proxiedClassName; + + public PassThroughInterceptor(String proxiedClassName) { + this.proxiedClassName = proxiedClassName; + } + + public Object intercept( + Object obj, + Method method, + Object[] args, + MethodProxy proxy) throws Throwable { + String name = method.getName(); + if ( "toString".equals( name ) ) { + return proxiedClassName + "@" + System.identityHashCode( obj ); + } + else if ( "equals".equals( name ) ) { + return args[0] instanceof Factory && ( ( Factory ) args[0] ).getCallback( 0 ) == this + ? Boolean.TRUE + : Boolean.FALSE; + } + else if ( "hashCode".equals( name ) ) { + return new Integer( System.identityHashCode( obj ) ); + } + boolean hasGetterSignature = method.getParameterTypes().length == 0 && method.getReturnType() != null; + boolean hasSetterSignature = method.getParameterTypes().length == 1 && ( method.getReturnType() == null || method.getReturnType() == void.class ); + if ( name.startsWith( "get" ) && hasGetterSignature ) { + String propName = name.substring( 3 ); + return data.get( propName ); + } + else if ( name.startsWith( "is" ) && hasGetterSignature ) { + String propName = name.substring( 2 ); + return data.get( propName ); + } + else if ( name.startsWith( "set" ) && hasSetterSignature) { + String propName = name.substring( 3 ); + data.put( propName, args[0] ); + return null; + } + else { + // todo : what else to do here? 
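+ // fall-through: a method that is neither equals/hashCode/toString nor a
+ // recognizable getter/setter is treated as a no-op and simply yields null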
+ return null; + } + } + } +} diff --git a/src/org/hibernate/bytecode/cglib/ReflectionOptimizerImpl.java b/src/org/hibernate/bytecode/cglib/ReflectionOptimizerImpl.java new file mode 100644 index 0000000000..e92c6ea1a8 --- /dev/null +++ b/src/org/hibernate/bytecode/cglib/ReflectionOptimizerImpl.java @@ -0,0 +1,34 @@ +package org.hibernate.bytecode.cglib; + +import org.hibernate.bytecode.ReflectionOptimizer; + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; + +/** + * ReflectionOptimizer implementation for CGLIB. + * + * @author Steve Ebersole + */ +public class ReflectionOptimizerImpl implements ReflectionOptimizer, Serializable { + private transient InstantiationOptimizerAdapter instantiationOptimizer; + private transient AccessOptimizerAdapter accessOptimizer; + + public ReflectionOptimizerImpl( + InstantiationOptimizerAdapter instantiationOptimizer, + AccessOptimizerAdapter accessOptimizer) { + this.instantiationOptimizer = instantiationOptimizer; + this.accessOptimizer = accessOptimizer; + } + + public InstantiationOptimizer getInstantiationOptimizer() { + return instantiationOptimizer; + } + + public AccessOptimizer getAccessOptimizer() { + return accessOptimizer; + } + +} diff --git a/src/org/hibernate/bytecode/javassist/AccessOptimizerAdapter.java b/src/org/hibernate/bytecode/javassist/AccessOptimizerAdapter.java new file mode 100644 index 0000000000..c1ee941122 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/AccessOptimizerAdapter.java @@ -0,0 +1,81 @@ +package org.hibernate.bytecode.javassist; + +import org.hibernate.bytecode.ReflectionOptimizer; +import org.hibernate.PropertyAccessException; + +import java.io.Serializable; + +/** + * The {@link ReflectionOptimizer.AccessOptimizer} implementation for Javassist + * which simply acts as an adpater to the {@link BulkAccessor} class. 
+ * + * @author Steve Ebersole + */ +public class AccessOptimizerAdapter implements ReflectionOptimizer.AccessOptimizer, Serializable { + + public static final String PROPERTY_GET_EXCEPTION = + "exception getting property value with Javassist (set hibernate.bytecode.use_reflection_optimizer=false for more info)"; + + public static final String PROPERTY_SET_EXCEPTION = + "exception setting property value with Javassist (set hibernate.bytecode.use_reflection_optimizer=false for more info)"; + + private final BulkAccessor bulkAccessor; + private final Class mappedClass; + + public AccessOptimizerAdapter(BulkAccessor bulkAccessor, Class mappedClass) { + this.bulkAccessor = bulkAccessor; + this.mappedClass = mappedClass; + } + + public String[] getPropertyNames() { + return bulkAccessor.getGetters(); + } + + public Object[] getPropertyValues(Object object) { + try { + return bulkAccessor.getPropertyValues( object ); + } + catch ( Throwable t ) { + throw new PropertyAccessException( + t, + PROPERTY_GET_EXCEPTION, + false, + mappedClass, + getterName( t, bulkAccessor ) + ); + } + } + + public void setPropertyValues(Object object, Object[] values) { + try { + bulkAccessor.setPropertyValues( object, values ); + } + catch ( Throwable t ) { + throw new PropertyAccessException( + t, + PROPERTY_SET_EXCEPTION, + true, + mappedClass, + setterName( t, bulkAccessor ) + ); + } + } + + private static String setterName(Throwable t, BulkAccessor accessor) { + if (t instanceof BulkAccessorException ) { + return accessor.getSetters()[ ( (BulkAccessorException) t ).getIndex() ]; + } + else { + return "?"; + } + } + + private static String getterName(Throwable t, BulkAccessor accessor) { + if (t instanceof BulkAccessorException ) { + return accessor.getGetters()[ ( (BulkAccessorException) t ).getIndex() ]; + } + else { + return "?"; + } + } +} diff --git a/src/org/hibernate/bytecode/javassist/BulkAccessor.java b/src/org/hibernate/bytecode/javassist/BulkAccessor.java new file mode 100644 index 0000000000..a7a4d14ec3 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/BulkAccessor.java @@ -0,0 +1,92 @@ +package org.hibernate.bytecode.javassist; + +import java.io.Serializable; + + +/** + * A JavaBean accessor. + *
+ * <p>This object provides methods that set/get multiple properties
+ * of a JavaBean at once.  This class and its support classes have been
+ * developed for compatibility with cglib
+ * (http://cglib.sourceforge.net/).
+ *
+ * @author Muga Nishizawa
+ * @author modified by Shigeru Chiba
+ */
+public abstract class BulkAccessor implements Serializable {
+	protected Class target;
+	protected String[] getters, setters;
+	protected Class[] types;
+
+	protected BulkAccessor() {
+	}
+
+	/**
+	 * Obtains the values of properties of a given bean.
+	 *
+	 * @param bean   JavaBean.
+	 * @param values the obtained values are stored in this array.
+	 */
+	public abstract void getPropertyValues(Object bean, Object[] values);
+
+	/**
+	 * Sets properties of a given bean to specified values.
+	 *
+	 * @param bean   JavaBean.
+	 * @param values the values assigned to properties.
+	 */
+	public abstract void setPropertyValues(Object bean, Object[] values);
+
+	/**
+	 * Returns the values of properties of a given bean.
+	 *
+	 * @param bean JavaBean.
+	 */
+	public Object[] getPropertyValues(Object bean) {
+		Object[] values = new Object[getters.length];
+		getPropertyValues( bean, values );
+		return values;
+	}
+
+	/**
+	 * Returns the types of properties.
+	 */
+	public Class[] getPropertyTypes() {
+		return ( Class[] ) types.clone();
+	}
+
+	/**
+	 * Returns the getter names of the properties.
+	 */
+	public String[] getGetters() {
+		return ( String[] ) getters.clone();
+	}
+
+	/**
+	 * Returns the setter names of the properties.
+	 */
+	public String[] getSetters() {
+		return ( String[] ) setters.clone();
+	}
+
+	/**
+	 * Creates a new instance of BulkAccessor.
+	 * The created instance provides methods for setting/getting
+	 * specified properties at once.
+	 *
+	 * @param beanClass the class of the JavaBeans accessed
+	 *                  through the created object.
+	 * @param getters   the names of getter methods for specified properties.
+	 * @param setters   the names of setter methods for specified properties.
+	 * @param types     the types of specified properties.
+	 */
+	public static BulkAccessor create(
+			Class beanClass,
+			String[] getters,
+			String[] setters,
+			Class[] types) {
+		BulkAccessorFactory factory = new BulkAccessorFactory( beanClass, getters, setters, types );
+		return factory.create();
+	}
+}
diff --git a/src/org/hibernate/bytecode/javassist/BulkAccessorException.java b/src/org/hibernate/bytecode/javassist/BulkAccessorException.java
new file mode 100644
index 0000000000..497c282376
--- /dev/null
+++ b/src/org/hibernate/bytecode/javassist/BulkAccessorException.java
@@ -0,0 +1,78 @@
+package org.hibernate.bytecode.javassist;
+
+/**
+ * An exception thrown while generating a bulk accessor.
+ *
+ * @author Muga Nishizawa
+ * @author modified by Shigeru Chiba
+ */
+public class BulkAccessorException extends RuntimeException {
+	private Throwable myCause;
+
+	/**
+	 * Gets the cause of this throwable.
+	 * It is for JDK 1.3 compatibility.
+	 */
+	public Throwable getCause() {
+		return (myCause == this ? null : myCause);
+	}
+
+	/**
+	 * Initializes the cause of this throwable.
+	 * It is for JDK 1.3 compatibility.
+	 */
+	public synchronized Throwable initCause(Throwable cause) {
+		myCause = cause;
+		return this;
+	}
+
+	private int index;
+
+	/**
+	 * Constructs an exception.
+	 */
+	public BulkAccessorException(String message) {
+		super(message);
+		index = -1;
+		initCause(null);
+	}
+
+	/**
+	 * Constructs an exception.
+	 *
+	 * @param index the index of the property that causes an exception.
+ */ + public BulkAccessorException(String message, int index) { + this(message + ": " + index); + this.index = index; + } + + /** + * Constructs an exception. + */ + public BulkAccessorException(String message, Throwable cause) { + super(message); + index = -1; + initCause(cause); + } + + /** + * Constructs an exception. + * + * @param index the index of the property that causes an exception. + */ + public BulkAccessorException(Throwable cause, int index) { + this("Property " + index); + this.index = index; + initCause(cause); + } + + /** + * Returns the index of the property that causes this exception. + * + * @return -1 if the index is not specified. + */ + public int getIndex() { + return this.index; + } +} diff --git a/src/org/hibernate/bytecode/javassist/BulkAccessorFactory.java b/src/org/hibernate/bytecode/javassist/BulkAccessorFactory.java new file mode 100644 index 0000000000..1821a8ac25 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/BulkAccessorFactory.java @@ -0,0 +1,388 @@ +package org.hibernate.bytecode.javassist; + +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.security.ProtectionDomain; + +import javassist.CannotCompileException; +import javassist.bytecode.AccessFlag; +import javassist.bytecode.Bytecode; +import javassist.bytecode.ClassFile; +import javassist.bytecode.ConstPool; +import javassist.bytecode.MethodInfo; +import javassist.bytecode.Opcode; +import javassist.util.proxy.FactoryHelper; +import javassist.util.proxy.RuntimeSupport; + +/** + * A factory of bulk accessors. + * + * @author Muga Nishizawa + * @author modified by Shigeru Chiba + */ +class BulkAccessorFactory { + private static final String PACKAGE_NAME_PREFIX = "org.javassist.tmp."; + private static final String BULKACESSOR_CLASS_NAME = BulkAccessor.class.getName(); + private static final String OBJECT_CLASS_NAME = Object.class.getName(); + private static final String GENERATED_GETTER_NAME = "getPropertyValues"; + private static final String GENERATED_SETTER_NAME = "setPropertyValues"; + private static final String GET_SETTER_DESC = "(Ljava/lang/Object;[Ljava/lang/Object;)V"; + private static final String THROWABLE_CLASS_NAME = Throwable.class.getName(); + private static final String BULKEXCEPTION_CLASS_NAME = BulkAccessorException.class.getName(); + private static int counter = 0; + + private Class targetBean; + private String[] getterNames; + private String[] setterNames; + private Class[] types; + public String writeDirectory; + + BulkAccessorFactory( + Class target, + String[] getterNames, + String[] setterNames, + Class[] types) { + this.targetBean = target; + this.getterNames = getterNames; + this.setterNames = setterNames; + this.types = types; + this.writeDirectory = null; + } + + BulkAccessor create() { + Method[] getters = new Method[getterNames.length]; + Method[] setters = new Method[setterNames.length]; + findAccessors( targetBean, getterNames, setterNames, types, getters, setters ); + + Class beanClass; + try { + ClassFile classfile = make( getters, setters ); + ClassLoader loader = this.getClassLoader(); + if ( writeDirectory != null ) { + FactoryHelper.writeFile( classfile, writeDirectory ); + } + + beanClass = FactoryHelper.toClass( classfile, loader, getDomain() ); + return ( BulkAccessor ) this.newInstance( beanClass ); + } + catch ( Exception e ) { + throw new BulkAccessorException( e.getMessage(), e ); + } + } + + private ProtectionDomain getDomain() { + Class cl; + if ( this.targetBean != null ) { + cl = this.targetBean; + } + else { + cl 
= this.getClass(); + } + return cl.getProtectionDomain(); + } + + private ClassFile make(Method[] getters, Method[] setters) throws CannotCompileException { + String className = targetBean.getName(); + // set the name of bulk accessor. + className = className + "_$$_bulkaccess_" + counter++; + if ( className.startsWith( "java." ) ) { + className = "org.javassist.tmp." + className; + } + + ClassFile classfile = new ClassFile( false, className, BULKACESSOR_CLASS_NAME ); + classfile.setAccessFlags( AccessFlag.PUBLIC ); + addDefaultConstructor( classfile ); + addGetter( classfile, getters ); + addSetter( classfile, setters ); + return classfile; + } + + private ClassLoader getClassLoader() { + if ( targetBean != null && targetBean.getName().equals( OBJECT_CLASS_NAME ) ) { + return targetBean.getClassLoader(); + } + else { + return getClass().getClassLoader(); + } + } + + private Object newInstance(Class type) throws Exception { + BulkAccessor instance = ( BulkAccessor ) type.newInstance(); + instance.target = targetBean; + int len = getterNames.length; + instance.getters = new String[len]; + instance.setters = new String[len]; + instance.types = new Class[len]; + for ( int i = 0; i < len; i++ ) { + instance.getters[i] = getterNames[i]; + instance.setters[i] = setterNames[i]; + instance.types[i] = types[i]; + } + + return instance; + } + + /** + * Declares a constructor that takes no parameter. + * + * @param classfile + * @throws CannotCompileException + */ + private void addDefaultConstructor(ClassFile classfile) throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + String cons_desc = "()V"; + MethodInfo mi = new MethodInfo( cp, MethodInfo.nameInit, cons_desc ); + + Bytecode code = new Bytecode( cp, 0, 1 ); + // aload_0 + code.addAload( 0 ); + // invokespecial + code.addInvokespecial( BulkAccessor.class.getName(), MethodInfo.nameInit, cons_desc ); + // return + code.addOpcode( Opcode.RETURN ); + + mi.setCodeAttribute( code.toCodeAttribute() ); + mi.setAccessFlags( AccessFlag.PUBLIC ); + classfile.addMethod( mi ); + } + + private void addGetter(ClassFile classfile, final Method[] getters) throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + int target_type_index = cp.addClassInfo( this.targetBean.getName() ); + String desc = GET_SETTER_DESC; + MethodInfo mi = new MethodInfo( cp, GENERATED_GETTER_NAME, desc ); + + Bytecode code = new Bytecode( cp, 6, 4 ); + /* | this | bean | args | raw bean | */ + if ( getters.length >= 0 ) { + // aload_1 // load bean + code.addAload( 1 ); + // checkcast // cast bean + code.addCheckcast( this.targetBean.getName() ); + // astore_3 // store bean + code.addAstore( 3 ); + for ( int i = 0; i < getters.length; ++i ) { + if ( getters[i] != null ) { + Method getter = getters[i]; + // aload_2 // args + code.addAload( 2 ); + // iconst_i // continue to aastore + code.addIconst( i ); // growing stack is 1 + Class returnType = getter.getReturnType(); + int typeIndex = -1; + if ( returnType.isPrimitive() ) { + typeIndex = FactoryHelper.typeIndex( returnType ); + // new + code.addNew( FactoryHelper.wrapperTypes[typeIndex] ); + // dup + code.addOpcode( Opcode.DUP ); + } + + // aload_3 // load the raw bean + code.addAload( 3 ); + String getter_desc = RuntimeSupport.makeDescriptor( getter ); + String getterName = getter.getName(); + if ( this.targetBean.isInterface() ) { + // invokeinterface + code.addInvokeinterface( target_type_index, getterName, getter_desc, 1 ); + } + else { + // invokevirtual + code.addInvokevirtual( 
target_type_index, getterName, getter_desc ); + } + + if ( typeIndex >= 0 ) { // is a primitive type + // invokespecial + code.addInvokespecial( + FactoryHelper.wrapperTypes[typeIndex], + MethodInfo.nameInit, + FactoryHelper.wrapperDesc[typeIndex] + ); + } + + // aastore // args + code.add( Opcode.AASTORE ); + code.growStack( -3 ); + } + } + } + // return + code.addOpcode( Opcode.RETURN ); + + mi.setCodeAttribute( code.toCodeAttribute() ); + mi.setAccessFlags( AccessFlag.PUBLIC ); + classfile.addMethod( mi ); + } + + private void addSetter(ClassFile classfile, final Method[] setters) throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + int target_type_index = cp.addClassInfo( this.targetBean.getName() ); + String desc = GET_SETTER_DESC; + MethodInfo mi = new MethodInfo( cp, GENERATED_SETTER_NAME, desc ); + + Bytecode code = new Bytecode( cp, 4, 6 ); + /* | this | bean | args | i | raw bean | exception | */ + if ( setters.length > 0 ) { + int start, end; // required to exception table + // iconst_0 // i + code.addIconst( 0 ); + // istore_3 // store i + code.addIstore( 3 ); + // aload_1 // load the bean + code.addAload( 1 ); + // checkcast // cast the bean into a raw bean + code.addCheckcast( this.targetBean.getName() ); + // astore 4 // store the raw bean + code.addAstore( 4 ); + /* current stack len = 0 */ + // start region to handling exception (BulkAccessorException) + start = code.currentPc(); + int lastIndex = 0; + for ( int i = 0; i < setters.length; ++i ) { + if ( setters[i] != null ) { + int diff = i - lastIndex; + if ( diff > 0 ) { + // iinc 3, 1 + code.addOpcode( Opcode.IINC ); + code.add( 3 ); + code.add( diff ); + lastIndex = i; + } + } + /* current stack len = 0 */ + // aload 4 // load the raw bean + code.addAload( 4 ); + // aload_2 // load the args + code.addAload( 2 ); + // iconst_i + code.addIconst( i ); + // aaload + code.addOpcode( Opcode.AALOAD ); + // checkcast + Class[] setterParamTypes = setters[i].getParameterTypes(); + Class setterParamType = setterParamTypes[0]; + if ( setterParamType.isPrimitive() ) { + // checkcast (case of primitive type) + // invokevirtual (case of primitive type) + this.addUnwrapper( classfile, code, setterParamType ); + } + else { + // checkcast (case of reference type) + code.addCheckcast( setterParamType.getName() ); + } + /* current stack len = 2 */ + String rawSetterMethod_desc = RuntimeSupport.makeDescriptor( setters[i] ); + if ( !this.targetBean.isInterface() ) { + // invokevirtual + code.addInvokevirtual( target_type_index, setters[i].getName(), rawSetterMethod_desc ); + } + else { + // invokeinterface + Class[] params = setters[i].getParameterTypes(); + int size; + if ( params[0].equals( Double.TYPE ) || params[0].equals( Long.TYPE ) ) { + size = 3; + } + else { + size = 2; + } + + code.addInvokeinterface( target_type_index, setters[i].getName(), rawSetterMethod_desc, size ); + } + } + + // end region to handling exception (BulkAccessorException) + end = code.currentPc(); + // return + code.addOpcode( Opcode.RETURN ); + /* current stack len = 0 */ + // register in exception table + int throwableType_index = cp.addClassInfo( THROWABLE_CLASS_NAME ); + code.addExceptionHandler( start, end, code.currentPc(), throwableType_index ); + // astore 5 // store exception + code.addAstore( 5 ); + // new // BulkAccessorException + code.addNew( BULKEXCEPTION_CLASS_NAME ); + // dup + code.addOpcode( Opcode.DUP ); + // aload 5 // load exception + code.addAload( 5 ); + // iload_3 // i + code.addIload( 3 ); + // invokespecial // 
BulkAccessorException. + String cons_desc = "(Ljava/lang/Throwable;I)V"; + code.addInvokespecial( BULKEXCEPTION_CLASS_NAME, MethodInfo.nameInit, cons_desc ); + // athrow + code.addOpcode( Opcode.ATHROW ); + } + else { + // return + code.addOpcode( Opcode.RETURN ); + } + + mi.setCodeAttribute( code.toCodeAttribute() ); + mi.setAccessFlags( AccessFlag.PUBLIC ); + classfile.addMethod( mi ); + } + + private void addUnwrapper( + ClassFile classfile, + Bytecode code, + Class type) { + int index = FactoryHelper.typeIndex( type ); + String wrapperType = FactoryHelper.wrapperTypes[index]; + // checkcast + code.addCheckcast( wrapperType ); + // invokevirtual + code.addInvokevirtual( wrapperType, FactoryHelper.unwarpMethods[index], FactoryHelper.unwrapDesc[index] ); + } + + private static void findAccessors( + Class clazz, + String[] getterNames, + String[] setterNames, + Class[] types, + Method[] getters, + Method[] setters) { + int length = types.length; + if ( setterNames.length != length || getterNames.length != length ) { + throw new BulkAccessorException( "bad number of accessors" ); + } + + Class[] getParam = new Class[0]; + Class[] setParam = new Class[1]; + for ( int i = 0; i < length; i++ ) { + if ( getterNames[i] != null ) { + Method getter = findAccessor( clazz, getterNames[i], getParam, i ); + if ( getter.getReturnType() != types[i] ) { + throw new BulkAccessorException( "wrong return type: " + getterNames[i], i ); + } + + getters[i] = getter; + } + + if ( setterNames[i] != null ) { + setParam[0] = types[i]; + setters[i] = findAccessor( clazz, setterNames[i], setParam, i ); + } + } + } + + private static Method findAccessor( + Class clazz, + String name, + Class[] params, + int index) throws BulkAccessorException { + try { + Method method = clazz.getDeclaredMethod( name, params ); + if ( Modifier.isPrivate( method.getModifiers() ) ) { + throw new BulkAccessorException( "private property", index ); + } + + return method; + } + catch ( NoSuchMethodException e ) { + throw new BulkAccessorException( "cannot find an accessor", index ); + } + } +} diff --git a/src/org/hibernate/bytecode/javassist/BytecodeProviderImpl.java b/src/org/hibernate/bytecode/javassist/BytecodeProviderImpl.java new file mode 100644 index 0000000000..80d2ccb64f --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/BytecodeProviderImpl.java @@ -0,0 +1,84 @@ +package org.hibernate.bytecode.javassist; + +import java.lang.reflect.Modifier; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.bytecode.BytecodeProvider; +import org.hibernate.bytecode.ClassTransformer; +import org.hibernate.bytecode.ProxyFactoryFactory; +import org.hibernate.bytecode.ReflectionOptimizer; +import org.hibernate.bytecode.util.ClassFilter; +import org.hibernate.bytecode.util.FieldFilter; +import org.hibernate.util.StringHelper; + +/** + * Bytecode provider implementation for Javassist. 
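+ * <p>
+ * A usage sketch (illustrative only; the Person class and its accessors
+ * below are hypothetical):
+ * <pre>
+ * BytecodeProvider provider = new BytecodeProviderImpl();
+ * ReflectionOptimizer optimizer = provider.getReflectionOptimizer(
+ *         Person.class,
+ *         new String[] { "getName" },
+ *         new String[] { "setName" },
+ *         new Class[] { String.class }
+ * );
+ * // may legitimately return null when the class cannot be optimized
+ * </pre>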
+ * + * @author Steve Ebersole + */ +public class BytecodeProviderImpl implements BytecodeProvider { + + private static final Log log = LogFactory.getLog( BytecodeProviderImpl.class ); + + public ProxyFactoryFactory getProxyFactoryFactory() { + return new ProxyFactoryFactoryImpl(); + } + + public ReflectionOptimizer getReflectionOptimizer( + Class clazz, + String[] getterNames, + String[] setterNames, + Class[] types) { + FastClass fastClass; + BulkAccessor bulkAccessor; + try { + fastClass = FastClass.create( clazz ); + bulkAccessor = BulkAccessor.create( clazz, getterNames, setterNames, types ); + if ( !clazz.isInterface() && !Modifier.isAbstract( clazz.getModifiers() ) ) { + if ( fastClass == null ) { + bulkAccessor = null; + } + else { + //test out the optimizer: + Object instance = fastClass.newInstance(); + bulkAccessor.setPropertyValues( instance, bulkAccessor.getPropertyValues( instance ) ); + } + } + } + catch ( Throwable t ) { + fastClass = null; + bulkAccessor = null; + String message = "reflection optimizer disabled for: " + + clazz.getName() + + " [" + + StringHelper.unqualify( t.getClass().getName() ) + + ": " + + t.getMessage(); + + if ( t instanceof BulkAccessorException ) { + int index = ( ( BulkAccessorException ) t ).getIndex(); + if ( index >= 0 ) { + message += " (property " + setterNames[index] + ")"; + } + } + + log.debug( message ); + } + + if ( fastClass != null && bulkAccessor != null ) { + return new ReflectionOptimizerImpl( + new InstantiationOptimizerAdapter( fastClass ), + new AccessOptimizerAdapter( bulkAccessor, clazz ) + ); + } + else { + return null; + } + } + + public ClassTransformer getTransformer(ClassFilter classFilter, FieldFilter fieldFilter) { + return new JavassistClassTransformer( classFilter, fieldFilter ); + } + +} diff --git a/src/org/hibernate/bytecode/javassist/FastClass.java b/src/org/hibernate/bytecode/javassist/FastClass.java new file mode 100644 index 0000000000..60ae94ce60 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/FastClass.java @@ -0,0 +1,170 @@ +package org.hibernate.bytecode.javassist; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.io.Serializable; + +/** + * @author Muga Nishizawa + */ +public class FastClass implements Serializable { + + private static final Class[] EMPTY_CLASS_ARRAY = new Class[0]; + + private Class type; + + private FastClass() { + } + + private FastClass(Class type) { + this.type = type; + } + + public Object invoke( + String name, + Class[] parameterTypes, + Object obj, + Object[] args) throws InvocationTargetException { + return this.invoke( this.getIndex( name, parameterTypes ), obj, args ); + } + + public Object invoke( + int index, + Object obj, + Object[] args) throws InvocationTargetException { + Method[] methods = this.type.getMethods(); + try { + return methods[index].invoke( obj, args ); + } + catch ( ArrayIndexOutOfBoundsException e ) { + throw new IllegalArgumentException( + "Cannot find matching method/constructor" + ); + } + catch ( IllegalAccessException e ) { + throw new InvocationTargetException( e ); + } + } + + public Object newInstance() throws InvocationTargetException { + return this.newInstance( this.getIndex( EMPTY_CLASS_ARRAY ), null ); + } + + public Object newInstance( + Class[] parameterTypes, + Object[] args) throws InvocationTargetException { + return this.newInstance( this.getIndex( parameterTypes ), args ); + } + + public Object 
newInstance( + int index, + Object[] args) throws InvocationTargetException { + Constructor[] conss = this.type.getConstructors(); + try { + return conss[index].newInstance( args ); + } + catch ( ArrayIndexOutOfBoundsException e ) { + throw new IllegalArgumentException( "Cannot find matching method/constructor" ); + } + catch ( InstantiationException e ) { + throw new InvocationTargetException( e ); + } + catch ( IllegalAccessException e ) { + throw new InvocationTargetException( e ); + } + } + + public int getIndex(String name, Class[] parameterTypes) { + Method[] methods = this.type.getMethods(); + boolean eq = true; + for ( int i = 0; i < methods.length; ++i ) { + if ( !Modifier.isPublic( methods[i].getModifiers() ) ) { + continue; + } + if ( !methods[i].getName().equals( name ) ) { + continue; + } + Class[] params = methods[i].getParameterTypes(); + if ( params.length != parameterTypes.length ) { + continue; + } + eq = true; + for ( int j = 0; j < params.length; ++j ) { + if ( !params[j].equals( parameterTypes[j] ) ) { + eq = false; + break; + } + } + if ( eq ) { + return i; + } + } + return -1; + } + + public int getIndex(Class[] parameterTypes) { + Constructor[] conss = this.type.getConstructors(); + boolean eq = true; + for ( int i = 0; i < conss.length; ++i ) { + if ( !Modifier.isPublic( conss[i].getModifiers() ) ) { + continue; + } + Class[] params = conss[i].getParameterTypes(); + if ( params.length != parameterTypes.length ) { + continue; + } + eq = true; + for ( int j = 0; j < params.length; ++j ) { + if ( !params[j].equals( parameterTypes[j] ) ) { + eq = false; + break; + } + } + if ( eq ) { + return i; + } + } + return -1; + } + + public int getMaxIndex() { + Method[] methods = this.type.getMethods(); + int count = 0; + for ( int i = 0; i < methods.length; ++i ) { + if ( Modifier.isPublic( methods[i].getModifiers() ) ) { + count++; + } + } + return count; + } + + public String getName() { + return this.type.getName(); + } + + public Class getJavaClass() { + return this.type; + } + + public String toString() { + return this.type.toString(); + } + + public int hashCode() { + return this.type.hashCode(); + } + + public boolean equals(Object o) { + if ( !( o instanceof FastClass ) ) { + return false; + } + return this.type.equals( ( ( FastClass ) o ).type ); + } + + public static FastClass create(Class type) { + FastClass fc = new FastClass( type ); + return fc; + } +} diff --git a/src/org/hibernate/bytecode/javassist/FieldFilter.java b/src/org/hibernate/bytecode/javassist/FieldFilter.java new file mode 100644 index 0000000000..7a5ee0d5a6 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/FieldFilter.java @@ -0,0 +1,28 @@ +package org.hibernate.bytecode.javassist; + +/** + * Contract for deciding whether fields should be read and/or write intercepted. + * + * @author Muga Nishizawa + */ +public interface FieldFilter { + /** + * Should the given field be read intercepted? + * + * @param desc + * @param name + * @return true if the given field should be read intercepted; otherwise + * false. + */ + boolean handleRead(String desc, String name); + + /** + * Should the given field be write intercepted? + * + * @param desc + * @param name + * @return true if the given field should be write intercepted; otherwise + * false. 
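+ * <p>
+ * Implementations of this contract are consulted by {@link FieldTransformer}
+ * when deciding which fields of a class receive generated interception methods.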
+ */ + boolean handleWrite(String desc, String name); +} diff --git a/src/org/hibernate/bytecode/javassist/FieldHandled.java b/src/org/hibernate/bytecode/javassist/FieldHandled.java new file mode 100644 index 0000000000..c25fbef8a6 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/FieldHandled.java @@ -0,0 +1,23 @@ +package org.hibernate.bytecode.javassist; + +/** + * Interface introduced to the enhanced class in order to be able to + * inject a {@link FieldHandler} to define the interception behavior. + * + * @author Muga Nishizawa + */ +public interface FieldHandled { + /** + * Inject the field interception handler to be used. + * + * @param handler The field interception handler. + */ + public void setFieldHandler(FieldHandler handler); + + /** + * Access to the current field interception handler. + * + * @return The current field interception handler. + */ + public FieldHandler getFieldHandler(); +} diff --git a/src/org/hibernate/bytecode/javassist/FieldHandler.java b/src/org/hibernate/bytecode/javassist/FieldHandler.java new file mode 100644 index 0000000000..66ca6edcb8 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/FieldHandler.java @@ -0,0 +1,56 @@ +package org.hibernate.bytecode.javassist; + +/** + * The interface defining how interception of a field should be handled. + * + * @author Muga Nishizawa + */ +public interface FieldHandler { + + /** + * Called to handle writing an int value to a given field. + * + * @param obj ? + * @param name The name of the field being written + * @param oldValue The old field value + * @param newValue The new field value. + * @return ? + */ + int writeInt(Object obj, String name, int oldValue, int newValue); + + char writeChar(Object obj, String name, char oldValue, char newValue); + + byte writeByte(Object obj, String name, byte oldValue, byte newValue); + + boolean writeBoolean(Object obj, String name, boolean oldValue, + boolean newValue); + + short writeShort(Object obj, String name, short oldValue, short newValue); + + float writeFloat(Object obj, String name, float oldValue, float newValue); + + double writeDouble(Object obj, String name, double oldValue, double newValue); + + long writeLong(Object obj, String name, long oldValue, long newValue); + + Object writeObject(Object obj, String name, Object oldValue, Object newValue); + + int readInt(Object obj, String name, int oldValue); + + char readChar(Object obj, String name, char oldValue); + + byte readByte(Object obj, String name, byte oldValue); + + boolean readBoolean(Object obj, String name, boolean oldValue); + + short readShort(Object obj, String name, short oldValue); + + float readFloat(Object obj, String name, float oldValue); + + double readDouble(Object obj, String name, double oldValue); + + long readLong(Object obj, String name, long oldValue); + + Object readObject(Object obj, String name, Object oldValue); + +} diff --git a/src/org/hibernate/bytecode/javassist/FieldTransformer.java b/src/org/hibernate/bytecode/javassist/FieldTransformer.java new file mode 100644 index 0000000000..2394cb41d0 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/FieldTransformer.java @@ -0,0 +1,592 @@ +package org.hibernate.bytecode.javassist; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +import javassist.CannotCompileException; +import javassist.bytecode.AccessFlag; +import 
javassist.bytecode.BadBytecode; +import javassist.bytecode.Bytecode; +import javassist.bytecode.ClassFile; +import javassist.bytecode.CodeAttribute; +import javassist.bytecode.CodeIterator; +import javassist.bytecode.ConstPool; +import javassist.bytecode.Descriptor; +import javassist.bytecode.FieldInfo; +import javassist.bytecode.MethodInfo; +import javassist.bytecode.Opcode; +import org.hibernate.bytecode.javassist.FieldFilter; +import org.hibernate.bytecode.javassist.FieldHandled; +import org.hibernate.bytecode.javassist.FieldHandler; + +/** + * The thing that handles actual class enhancement in regards to + * intercepting field accesses. + * + * @author Muga Nishizawa + */ +public class FieldTransformer { + + private static final String EACH_READ_METHOD_PREFIX = "$javassist_read_"; + + private static final String EACH_WRITE_METHOD_PREFIX = "$javassist_write_"; + + private static final String FIELD_HANDLED_TYPE_NAME = FieldHandled.class + .getName(); + + private static final String HANDLER_FIELD_NAME = "$JAVASSIST_READ_WRITE_HANDLER"; + + private static final String FIELD_HANDLER_TYPE_NAME = FieldHandler.class + .getName(); + + private static final String HANDLER_FIELD_DESCRIPTOR = 'L' + FIELD_HANDLER_TYPE_NAME + .replace('.', '/') + ';'; + + private static final String GETFIELDHANDLER_METHOD_NAME = "getFieldHandler"; + + private static final String SETFIELDHANDLER_METHOD_NAME = "setFieldHandler"; + + private static final String GETFIELDHANDLER_METHOD_DESCRIPTOR = "()" + + HANDLER_FIELD_DESCRIPTOR; + + private static final String SETFIELDHANDLER_METHOD_DESCRIPTOR = "(" + + HANDLER_FIELD_DESCRIPTOR + ")V"; + + private FieldFilter filter; + + private HashMap readableFields; + + private HashMap writableFields; + + public FieldTransformer() { + this(null); + } + + public FieldTransformer(FieldFilter f) { + filter = f; + readableFields = new HashMap(); + writableFields = new HashMap(); + } + + public void setFieldFilter(FieldFilter f) { + filter = f; + } + + public void transform(File file) throws Exception { + DataInputStream in = new DataInputStream(new FileInputStream(file)); + ClassFile classfile = new ClassFile(in); + transform(classfile); + DataOutputStream out = new DataOutputStream(new FileOutputStream(file)); + try { + classfile.write(out); + } finally { + out.close(); + } + } + + public void transform(ClassFile classfile) throws Exception { + if (classfile.isInterface()) { + return; + } + try { + addFieldHandlerField(classfile); + addGetFieldHandlerMethod(classfile); + addSetFieldHandlerMethod(classfile); + addFieldHandledInterface(classfile); + addReadWriteMethods(classfile); + transformInvokevirtualsIntoPutAndGetfields(classfile); + } catch (CannotCompileException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + private void addFieldHandlerField(ClassFile classfile) + throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + FieldInfo finfo = new FieldInfo(cp, HANDLER_FIELD_NAME, + HANDLER_FIELD_DESCRIPTOR); + finfo.setAccessFlags(AccessFlag.PRIVATE | AccessFlag.TRANSIENT); + classfile.addField(finfo); + } + + private void addGetFieldHandlerMethod(ClassFile classfile) + throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + int this_class_index = cp.getThisClassInfo(); + MethodInfo minfo = new MethodInfo(cp, GETFIELDHANDLER_METHOD_NAME, + GETFIELDHANDLER_METHOD_DESCRIPTOR); + /* local variable | this | */ + Bytecode code = new Bytecode(cp, 2, 1); + // aload_0 // load this + code.addAload(0); + // getfield // get field 
"$JAVASSIST_CALLBACK" defined already + code.addOpcode(Opcode.GETFIELD); + int field_index = cp.addFieldrefInfo(this_class_index, + HANDLER_FIELD_NAME, HANDLER_FIELD_DESCRIPTOR); + code.addIndex(field_index); + // areturn // return the value of the field + code.addOpcode(Opcode.ARETURN); + minfo.setCodeAttribute(code.toCodeAttribute()); + minfo.setAccessFlags(AccessFlag.PUBLIC); + classfile.addMethod(minfo); + } + + private void addSetFieldHandlerMethod(ClassFile classfile) + throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + int this_class_index = cp.getThisClassInfo(); + MethodInfo minfo = new MethodInfo(cp, SETFIELDHANDLER_METHOD_NAME, + SETFIELDHANDLER_METHOD_DESCRIPTOR); + /* local variables | this | callback | */ + Bytecode code = new Bytecode(cp, 3, 3); + // aload_0 // load this + code.addAload(0); + // aload_1 // load callback + code.addAload(1); + // putfield // put field "$JAVASSIST_CALLBACK" defined already + code.addOpcode(Opcode.PUTFIELD); + int field_index = cp.addFieldrefInfo(this_class_index, + HANDLER_FIELD_NAME, HANDLER_FIELD_DESCRIPTOR); + code.addIndex(field_index); + // return + code.addOpcode(Opcode.RETURN); + minfo.setCodeAttribute(code.toCodeAttribute()); + minfo.setAccessFlags(AccessFlag.PUBLIC); + classfile.addMethod(minfo); + } + + private void addFieldHandledInterface(ClassFile classfile) { + String[] interfaceNames = classfile.getInterfaces(); + String[] newInterfaceNames = new String[interfaceNames.length + 1]; + System.arraycopy(interfaceNames, 0, newInterfaceNames, 0, + interfaceNames.length); + newInterfaceNames[newInterfaceNames.length - 1] = FIELD_HANDLED_TYPE_NAME; + classfile.setInterfaces(newInterfaceNames); + } + + private void addReadWriteMethods(ClassFile classfile) + throws CannotCompileException { + List fields = classfile.getFields(); + for (Iterator field_iter = fields.iterator(); field_iter.hasNext();) { + FieldInfo finfo = (FieldInfo) field_iter.next(); + if ((finfo.getAccessFlags() & AccessFlag.STATIC) == 0 + && (!finfo.getName().equals(HANDLER_FIELD_NAME))) { + // case of non-static field + if (filter.handleRead(finfo.getDescriptor(), finfo + .getName())) { + addReadMethod(classfile, finfo); + readableFields.put(finfo.getName(), finfo + .getDescriptor()); + } + if (filter.handleWrite(finfo.getDescriptor(), finfo + .getName())) { + addWriteMethod(classfile, finfo); + writableFields.put(finfo.getName(), finfo + .getDescriptor()); + } + } + } + } + + private void addReadMethod(ClassFile classfile, FieldInfo finfo) + throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + int this_class_index = cp.getThisClassInfo(); + String desc = "()" + finfo.getDescriptor(); + MethodInfo minfo = new MethodInfo(cp, EACH_READ_METHOD_PREFIX + + finfo.getName(), desc); + /* local variables | target obj | each oldvalue | */ + Bytecode code = new Bytecode(cp, 5, 3); + // aload_0 + code.addAload(0); + // getfield // get each field + code.addOpcode(Opcode.GETFIELD); + int base_field_index = cp.addFieldrefInfo(this_class_index, finfo + .getName(), finfo.getDescriptor()); + code.addIndex(base_field_index); + // aload_0 + code.addAload(0); + // invokeinterface // invoke Enabled.getInterceptFieldCallback() + int enabled_class_index = cp.addClassInfo(FIELD_HANDLED_TYPE_NAME); + code.addInvokeinterface(enabled_class_index, + GETFIELDHANDLER_METHOD_NAME, GETFIELDHANDLER_METHOD_DESCRIPTOR, + 1); + // ifnonnull + code.addOpcode(Opcode.IFNONNULL); + code.addIndex(4); + // *return // each type + addTypeDependDataReturn(code, 
finfo.getDescriptor()); + // *store_1 // each type + addTypeDependDataStore(code, finfo.getDescriptor(), 1); + // aload_0 + code.addAload(0); + // invokeinterface // invoke Enabled.getInterceptFieldCallback() + code.addInvokeinterface(enabled_class_index, + GETFIELDHANDLER_METHOD_NAME, GETFIELDHANDLER_METHOD_DESCRIPTOR, + 1); + // aload_0 + code.addAload(0); + // ldc // name of the field + code.addLdc(finfo.getName()); + // *load_1 // each type + addTypeDependDataLoad(code, finfo.getDescriptor(), 1); + // invokeinterface // invoke Callback.read*() // each type + addInvokeFieldHandlerMethod(classfile, code, finfo.getDescriptor(), + true); + // *return // each type + addTypeDependDataReturn(code, finfo.getDescriptor()); + + minfo.setCodeAttribute(code.toCodeAttribute()); + minfo.setAccessFlags(AccessFlag.PUBLIC); + classfile.addMethod(minfo); + } + + private void addWriteMethod(ClassFile classfile, FieldInfo finfo) + throws CannotCompileException { + ConstPool cp = classfile.getConstPool(); + int this_class_index = cp.getThisClassInfo(); + String desc = "(" + finfo.getDescriptor() + ")V"; + MethodInfo minfo = new MethodInfo(cp, EACH_WRITE_METHOD_PREFIX + + finfo.getName(), desc); + /* local variables | target obj | each oldvalue | */ + Bytecode code = new Bytecode(cp, 6, 3); + // aload_0 + code.addAload(0); + // invokeinterface // enabled.getInterceptFieldCallback() + int enabled_class_index = cp.addClassInfo(FIELD_HANDLED_TYPE_NAME); + code.addInvokeinterface(enabled_class_index, + GETFIELDHANDLER_METHOD_NAME, GETFIELDHANDLER_METHOD_DESCRIPTOR, + 1); + // ifnonnull (label1) + code.addOpcode(Opcode.IFNONNULL); + code.addIndex(9); + // aload_0 + code.addAload(0); + // *load_1 + addTypeDependDataLoad(code, finfo.getDescriptor(), 1); + // putfield + code.addOpcode(Opcode.PUTFIELD); + int base_field_index = cp.addFieldrefInfo(this_class_index, finfo + .getName(), finfo.getDescriptor()); + code.addIndex(base_field_index); + code.growStack(-Descriptor.dataSize(finfo.getDescriptor())); + // return ; + code.addOpcode(Opcode.RETURN); + // aload_0 + code.addAload(0); + // dup + code.addOpcode(Opcode.DUP); + // invokeinterface // enabled.getInterceptFieldCallback() + code.addInvokeinterface(enabled_class_index, + GETFIELDHANDLER_METHOD_NAME, GETFIELDHANDLER_METHOD_DESCRIPTOR, + 1); + // aload_0 + code.addAload(0); + // ldc // field name + code.addLdc(finfo.getName()); + // aload_0 + code.addAload(0); + // getfield // old value of the field + code.addOpcode(Opcode.GETFIELD); + code.addIndex(base_field_index); + code.growStack(Descriptor.dataSize(finfo.getDescriptor()) - 1); + // *load_1 + addTypeDependDataLoad(code, finfo.getDescriptor(), 1); + // invokeinterface // callback.write*(..) 
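+ // (the field descriptor selects the type-appropriate FieldHandler.writeXxx
+ // variant; see addInvokeFieldHandlerMethod below)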
+ addInvokeFieldHandlerMethod(classfile, code, finfo.getDescriptor(), + false); + // putfield // new value of the field + code.addOpcode(Opcode.PUTFIELD); + code.addIndex(base_field_index); + code.growStack(-Descriptor.dataSize(finfo.getDescriptor())); + // return + code.addOpcode(Opcode.RETURN); + + minfo.setCodeAttribute(code.toCodeAttribute()); + minfo.setAccessFlags(AccessFlag.PUBLIC); + classfile.addMethod(minfo); + } + + private void transformInvokevirtualsIntoPutAndGetfields(ClassFile classfile) + throws CannotCompileException { + List methods = classfile.getMethods(); + for (Iterator method_iter = methods.iterator(); method_iter.hasNext();) { + MethodInfo minfo = (MethodInfo) method_iter.next(); + String methodName = minfo.getName(); + if (methodName.startsWith(EACH_READ_METHOD_PREFIX) + || methodName.startsWith(EACH_WRITE_METHOD_PREFIX) + || methodName.equals(GETFIELDHANDLER_METHOD_NAME) + || methodName.equals(SETFIELDHANDLER_METHOD_NAME)) { + continue; + } + CodeAttribute codeAttr = minfo.getCodeAttribute(); + if (codeAttr == null) { + return; + } + CodeIterator iter = codeAttr.iterator(); + while (iter.hasNext()) { + try { + int pos = iter.next(); + pos = transformInvokevirtualsIntoGetfields(classfile, iter, + pos); + pos = transformInvokevirtualsIntoPutfields(classfile, iter, + pos); + + } catch (BadBytecode e) { + throw new CannotCompileException(e); + } + } + } + } + + private int transformInvokevirtualsIntoGetfields(ClassFile classfile, + CodeIterator iter, int pos) { + ConstPool cp = classfile.getConstPool(); + int c = iter.byteAt(pos); + if (c != Opcode.GETFIELD) { + return pos; + } + int index = iter.u16bitAt(pos + 1); + String fieldName = cp.getFieldrefName(index); + String className = cp.getFieldrefClassName(index); + if ((!classfile.getName().equals(className)) + || (!readableFields.containsKey(fieldName))) { + return pos; + } + String desc = "()" + (String) readableFields.get(fieldName); + int read_method_index = cp.addMethodrefInfo(cp.getThisClassInfo(), + EACH_READ_METHOD_PREFIX + fieldName, desc); + iter.writeByte(Opcode.INVOKEVIRTUAL, pos); + iter.write16bit(read_method_index, pos + 1); + return pos; + } + + private int transformInvokevirtualsIntoPutfields(ClassFile classfile, + CodeIterator iter, int pos) { + ConstPool cp = classfile.getConstPool(); + int c = iter.byteAt(pos); + if (c != Opcode.PUTFIELD) { + return pos; + } + int index = iter.u16bitAt(pos + 1); + String fieldName = cp.getFieldrefName(index); + String className = cp.getFieldrefClassName(index); + if ((!classfile.getName().equals(className)) + || (!writableFields.containsKey(fieldName))) { + return pos; + } + String desc = "(" + (String) writableFields.get(fieldName) + ")V"; + int write_method_index = cp.addMethodrefInfo(cp.getThisClassInfo(), + EACH_WRITE_METHOD_PREFIX + fieldName, desc); + iter.writeByte(Opcode.INVOKEVIRTUAL, pos); + iter.write16bit(write_method_index, pos + 1); + return pos; + } + + private static void addInvokeFieldHandlerMethod(ClassFile classfile, + Bytecode code, String typeName, boolean isReadMethod) { + ConstPool cp = classfile.getConstPool(); + // invokeinterface + int callback_type_index = cp.addClassInfo(FIELD_HANDLER_TYPE_NAME); + if ((typeName.charAt(0) == 'L') + && (typeName.charAt(typeName.length() - 1) == ';') + || (typeName.charAt(0) == '[')) { + // reference type + int indexOfL = typeName.indexOf('L'); + String type; + if (indexOfL == 0) { + // not array + type = typeName.substring(1, typeName.length() - 1); + type = type.replace('/', '.'); + } else if (indexOfL 
== -1) { + // array of primitive type + // do nothing + type = typeName; + } else { + // array of reference type + type = typeName.replace('/', '.'); + } + if (isReadMethod) { + code + .addInvokeinterface( + callback_type_index, + "readObject", + "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/Object;", + 4); + // checkcast + code.addCheckcast(type); + } else { + code + .addInvokeinterface( + callback_type_index, + "writeObject", + "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;", + 5); + // checkcast + code.addCheckcast(type); + } + } else if (typeName.equals("Z")) { + // boolean + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readBoolean", + "(Ljava/lang/Object;Ljava/lang/String;Z)Z", 4); + } else { + code.addInvokeinterface(callback_type_index, "writeBoolean", + "(Ljava/lang/Object;Ljava/lang/String;ZZ)Z", 5); + } + } else if (typeName.equals("B")) { + // byte + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readByte", + "(Ljava/lang/Object;Ljava/lang/String;B)B", 4); + } else { + code.addInvokeinterface(callback_type_index, "writeByte", + "(Ljava/lang/Object;Ljava/lang/String;BB)B", 5); + } + } else if (typeName.equals("C")) { + // char + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readChar", + "(Ljava/lang/Object;Ljava/lang/String;C)C", 4); + } else { + code.addInvokeinterface(callback_type_index, "writeChar", + "(Ljava/lang/Object;Ljava/lang/String;CC)C", 5); + } + } else if (typeName.equals("I")) { + // int + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readInt", + "(Ljava/lang/Object;Ljava/lang/String;I)I", 4); + } else { + code.addInvokeinterface(callback_type_index, "writeInt", + "(Ljava/lang/Object;Ljava/lang/String;II)I", 5); + } + } else if (typeName.equals("S")) { + // short + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readShort", + "(Ljava/lang/Object;Ljava/lang/String;S)S", 4); + } else { + code.addInvokeinterface(callback_type_index, "writeShort", + "(Ljava/lang/Object;Ljava/lang/String;SS)S", 5); + } + } else if (typeName.equals("D")) { + // double + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readDouble", + "(Ljava/lang/Object;Ljava/lang/String;D)D", 5); + } else { + code.addInvokeinterface(callback_type_index, "writeDouble", + "(Ljava/lang/Object;Ljava/lang/String;DD)D", 7); + } + } else if (typeName.equals("F")) { + // float + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readFloat", + "(Ljava/lang/Object;Ljava/lang/String;F)F", 4); + } else { + code.addInvokeinterface(callback_type_index, "writeFloat", + "(Ljava/lang/Object;Ljava/lang/String;FF)F", 5); + } + } else if (typeName.equals("J")) { + // long + if (isReadMethod) { + code.addInvokeinterface(callback_type_index, "readLong", + "(Ljava/lang/Object;Ljava/lang/String;J)J", 5); + } else { + code.addInvokeinterface(callback_type_index, "writeLong", + "(Ljava/lang/Object;Ljava/lang/String;JJ)J", 7); + } + } else { + // bad type + throw new RuntimeException("bad type: " + typeName); + } + } + + private static void addTypeDependDataLoad(Bytecode code, String typeName, + int i) { + if ((typeName.charAt(0) == 'L') + && (typeName.charAt(typeName.length() - 1) == ';') + || (typeName.charAt(0) == '[')) { + // reference type + code.addAload(i); + } else if (typeName.equals("Z") || typeName.equals("B") + || typeName.equals("C") || typeName.equals("I") + || typeName.equals("S")) { + // boolean, byte, 
char, int, short + code.addIload(i); + } else if (typeName.equals("D")) { + // double + code.addDload(i); + } else if (typeName.equals("F")) { + // float + code.addFload(i); + } else if (typeName.equals("J")) { + // long + code.addLload(i); + } else { + // bad type + throw new RuntimeException("bad type: " + typeName); + } + } + + private static void addTypeDependDataStore(Bytecode code, String typeName, + int i) { + if ((typeName.charAt(0) == 'L') + && (typeName.charAt(typeName.length() - 1) == ';') + || (typeName.charAt(0) == '[')) { + // reference type + code.addAstore(i); + } else if (typeName.equals("Z") || typeName.equals("B") + || typeName.equals("C") || typeName.equals("I") + || typeName.equals("S")) { + // boolean, byte, char, int, short + code.addIstore(i); + } else if (typeName.equals("D")) { + // double + code.addDstore(i); + } else if (typeName.equals("F")) { + // float + code.addFstore(i); + } else if (typeName.equals("J")) { + // long + code.addLstore(i); + } else { + // bad type + throw new RuntimeException("bad type: " + typeName); + } + } + + private static void addTypeDependDataReturn(Bytecode code, String typeName) { + if ((typeName.charAt(0) == 'L') + && (typeName.charAt(typeName.length() - 1) == ';') + || (typeName.charAt(0) == '[')) { + // reference type + code.addOpcode(Opcode.ARETURN); + } else if (typeName.equals("Z") || typeName.equals("B") + || typeName.equals("C") || typeName.equals("I") + || typeName.equals("S")) { + // boolean, byte, char, int, short + code.addOpcode(Opcode.IRETURN); + } else if (typeName.equals("D")) { + // double + code.addOpcode(Opcode.DRETURN); + } else if (typeName.equals("F")) { + // float + code.addOpcode(Opcode.FRETURN); + } else if (typeName.equals("J")) { + // long + code.addOpcode(Opcode.LRETURN); + } else { + // bad type + throw new RuntimeException("bad type: " + typeName); + } + } + +} diff --git a/src/org/hibernate/bytecode/javassist/InstantiationOptimizerAdapter.java b/src/org/hibernate/bytecode/javassist/InstantiationOptimizerAdapter.java new file mode 100644 index 0000000000..a8c59be5b9 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/InstantiationOptimizerAdapter.java @@ -0,0 +1,32 @@ +package org.hibernate.bytecode.javassist; + +import org.hibernate.bytecode.ReflectionOptimizer; +import org.hibernate.InstantiationException; + +import java.io.Serializable; + +/** + * The {@link ReflectionOptimizer.InstantiationOptimizer} implementation for Javassist + * which simply acts as an adpater to the {@link FastClass} class. 
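+ * <p>
+ * {@link #newInstance()} delegates straight to the wrapped {@link FastClass};
+ * any failure is rethrown as a Hibernate {@link org.hibernate.InstantiationException}.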
+ * + * @author Steve Ebersole + */ +public class InstantiationOptimizerAdapter implements ReflectionOptimizer.InstantiationOptimizer, Serializable { + private final FastClass fastClass; + + public InstantiationOptimizerAdapter(FastClass fastClass) { + this.fastClass = fastClass; + } + + public Object newInstance() { + try { + return fastClass.newInstance(); + } + catch ( Throwable t ) { + throw new InstantiationException( + "Could not instantiate entity with Javassist optimizer: ", + fastClass.getJavaClass(), t + ); + } + } +} diff --git a/src/org/hibernate/bytecode/javassist/JavassistClassTransformer.java b/src/org/hibernate/bytecode/javassist/JavassistClassTransformer.java new file mode 100644 index 0000000000..95f77747c8 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/JavassistClassTransformer.java @@ -0,0 +1,111 @@ +//$Id: $ +package org.hibernate.bytecode.javassist; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.security.ProtectionDomain; + +import javassist.bytecode.ClassFile; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.bytecode.AbstractClassTransformerImpl; +import org.hibernate.bytecode.util.ClassFilter; + +/** + * Enhance the classes allowing them to implements InterceptFieldEnabled + * This interface is then used by Hibernate for some optimizations. + * + * @author Emmanuel Bernard + * @author Steve Ebersole + */ +public class JavassistClassTransformer extends AbstractClassTransformerImpl { + + private static Log log = LogFactory.getLog( JavassistClassTransformer.class.getName() ); + + public JavassistClassTransformer(ClassFilter classFilter, org.hibernate.bytecode.util.FieldFilter fieldFilter) { + super( classFilter, fieldFilter ); + } + + protected byte[] doTransform( + ClassLoader loader, + String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, + byte[] classfileBuffer) { + ClassFile classfile; + try { + // WARNING: classfile only + classfile = new ClassFile( new DataInputStream( new ByteArrayInputStream( classfileBuffer ) ) ); + } + catch (IOException e) { + log.error( "Unable to build enhancement metamodel for " + className ); + return classfileBuffer; + } + FieldTransformer transformer = getFieldTransformer( classfile ); + if ( transformer != null ) { + if ( log.isDebugEnabled() ) { + log.debug( "Enhancing " + className ); + } + DataOutputStream out = null; + try { + transformer.transform( classfile ); + ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); + out = new DataOutputStream( byteStream ); + classfile.write( out ); + return byteStream.toByteArray(); + } + catch (Exception e) { + log.error( "Unable to transform class", e ); + throw new HibernateException( "Unable to transform class: " + e.getMessage() ); + } + finally { + try { + if ( out != null ) out.close(); + } + catch (IOException e) { + //swallow + } + } + } + return classfileBuffer; + } + + protected FieldTransformer getFieldTransformer(final ClassFile classfile) { + if ( alreadyInstrumented( classfile ) ) { + return null; + } + return new FieldTransformer( + new FieldFilter() { + public boolean handleRead(String desc, String name) { + return fieldFilter.shouldInstrumentField( classfile.getName(), name ); + } + + public boolean handleWrite(String desc, String name) { + return fieldFilter.shouldInstrumentField( 
classfile.getName(), name ); + } + + public boolean handleReadAccess(String fieldOwnerClassName, String fieldName) { + return fieldFilter.shouldTransformFieldAccess( classfile.getName(), fieldOwnerClassName, fieldName ); + } + + public boolean handleWriteAccess(String fieldOwnerClassName, String fieldName) { + return fieldFilter.shouldTransformFieldAccess( classfile.getName(), fieldOwnerClassName, fieldName ); + } + } + ); + } + + private boolean alreadyInstrumented(ClassFile classfile) { + String[] intfs = classfile.getInterfaces(); + for ( int i = 0; i < intfs.length; i++ ) { + if ( FieldHandled.class.getName().equals( intfs[i] ) ) { + return true; + } + } + return false; + } +} diff --git a/src/org/hibernate/bytecode/javassist/ProxyFactoryFactoryImpl.java b/src/org/hibernate/bytecode/javassist/ProxyFactoryFactoryImpl.java new file mode 100644 index 0000000000..13c4ff53a4 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/ProxyFactoryFactoryImpl.java @@ -0,0 +1,123 @@ +package org.hibernate.bytecode.javassist; + +import org.hibernate.bytecode.ProxyFactoryFactory; +import org.hibernate.bytecode.BasicProxyFactory; +import org.hibernate.proxy.ProxyFactory; +import org.hibernate.proxy.pojo.javassist.JavassistProxyFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import javassist.util.proxy.MethodFilter; +import javassist.util.proxy.ProxyObject; +import javassist.util.proxy.MethodHandler; + +import java.lang.reflect.Method; +import java.util.HashMap; + +/** + * A factory for Javassist-based {@link ProxyFactory} instances. + * + * @author Steve Ebersole + */ +public class ProxyFactoryFactoryImpl implements ProxyFactoryFactory { + + /** + * Builds a Javassist-based proxy factory. + * + * @return a new Javassist-based proxy factory. 
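+ * <p>
+ * Note that, per the {@link ProxyFactory} contract, the returned factory must
+ * still be initialized (via its postInstantiate method) before proxies can be obtained.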
+ */ + public ProxyFactory buildProxyFactory() { + return new JavassistProxyFactory(); + } + + public BasicProxyFactory buildBasicProxyFactory(Class superClass, Class[] interfaces) { + return new BasicProxyFactoryImpl( superClass, interfaces ); + } + + private static class BasicProxyFactoryImpl implements BasicProxyFactory { + private final Class proxyClass; + + public BasicProxyFactoryImpl(Class superClass, Class[] interfaces) { + if ( superClass == null && ( interfaces == null || interfaces.length < 1 ) ) { + throw new AssertionFailure( "attempting to build proxy without any superclass or interfaces" ); + } + javassist.util.proxy.ProxyFactory factory = new javassist.util.proxy.ProxyFactory(); + factory.setFilter( FINALIZE_FILTER ); + if ( superClass != null ) { + factory.setSuperclass( superClass ); + } + if ( interfaces != null && interfaces.length > 0 ) { + factory.setInterfaces( interfaces ); + } + proxyClass = factory.createClass(); + } + + public Object getProxy() { + try { + ProxyObject proxy = ( ProxyObject ) proxyClass.newInstance(); + proxy.setHandler( new PassThroughHandler( proxy, proxyClass.getName() ) ); + return proxy; + } + catch ( Throwable t ) { + throw new HibernateException( "Unable to instantiated proxy instance" ); + } + } + + public boolean isInstance(Object object) { + return proxyClass.isInstance( object ); + } + } + + private static final MethodFilter FINALIZE_FILTER = new MethodFilter() { + public boolean isHandled(Method m) { + // skip finalize methods + return !( m.getParameterTypes().length == 0 && m.getName().equals( "finalize" ) ); + } + }; + + private static class PassThroughHandler implements MethodHandler { + private HashMap data = new HashMap(); + private final Object proxiedObject; + private final String proxiedClassName; + + public PassThroughHandler(Object proxiedObject, String proxiedClassName) { + this.proxiedObject = proxiedObject; + this.proxiedClassName = proxiedClassName; + } + + public Object invoke( + Object object, + Method method, + Method method1, + Object[] args) throws Exception { + String name = method.getName(); + if ( "toString".equals( name ) ) { + return proxiedClassName + "@" + System.identityHashCode( object ); + } + else if ( "equals".equals( name ) ) { + return proxiedObject == object ? Boolean.TRUE : Boolean.FALSE; + } + else if ( "hashCode".equals( name ) ) { + return new Integer( System.identityHashCode( object ) ); + } + boolean hasGetterSignature = method.getParameterTypes().length == 0 && method.getReturnType() != null; + boolean hasSetterSignature = method.getParameterTypes().length == 1 && ( method.getReturnType() == null || method.getReturnType() == void.class ); + if ( name.startsWith( "get" ) && hasGetterSignature ) { + String propName = name.substring( 3 ); + return data.get( propName ); + } + else if ( name.startsWith( "is" ) && hasGetterSignature ) { + String propName = name.substring( 2 ); + return data.get( propName ); + } + else if ( name.startsWith( "set" ) && hasSetterSignature) { + String propName = name.substring( 3 ); + data.put( propName, args[0] ); + return null; + } + else { + // todo : what else to do here? 
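+ // for now, any method that is not a recognized accessor simply falls through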
+ return null; + } + } + } +} diff --git a/src/org/hibernate/bytecode/javassist/ReflectionOptimizerImpl.java b/src/org/hibernate/bytecode/javassist/ReflectionOptimizerImpl.java new file mode 100644 index 0000000000..ce553f20e7 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/ReflectionOptimizerImpl.java @@ -0,0 +1,32 @@ +package org.hibernate.bytecode.javassist; + +import org.hibernate.bytecode.ReflectionOptimizer; + +import java.io.Serializable; + +/** + * ReflectionOptimizer implementation for Javassist. + * + * @author Steve Ebersole + */ +public class ReflectionOptimizerImpl implements ReflectionOptimizer, Serializable { + + private final InstantiationOptimizer instantiationOptimizer; + private final AccessOptimizer accessOptimizer; + + public ReflectionOptimizerImpl( + InstantiationOptimizer instantiationOptimizer, + AccessOptimizer accessOptimizer) { + this.instantiationOptimizer = instantiationOptimizer; + this.accessOptimizer = accessOptimizer; + } + + public InstantiationOptimizer getInstantiationOptimizer() { + return instantiationOptimizer; + } + + public AccessOptimizer getAccessOptimizer() { + return accessOptimizer; + } + +} diff --git a/src/org/hibernate/bytecode/javassist/TransformingClassLoader.java b/src/org/hibernate/bytecode/javassist/TransformingClassLoader.java new file mode 100644 index 0000000000..2e373f8e73 --- /dev/null +++ b/src/org/hibernate/bytecode/javassist/TransformingClassLoader.java @@ -0,0 +1,57 @@ +package org.hibernate.bytecode.javassist; + +import javassist.ClassPool; +import javassist.NotFoundException; +import javassist.CtClass; +import javassist.CannotCompileException; +import org.hibernate.HibernateException; + +import java.io.IOException; +import java.util.ArrayList; + +/** + * @author Steve Ebersole + */ +public class TransformingClassLoader extends ClassLoader { + private ClassLoader parent; + private ClassPool classPool; + + /*package*/ TransformingClassLoader(ClassLoader parent, String[] classpath) { + this.parent = parent; + classPool = new ClassPool( true ); + for ( int i = 0; i < classpath.length; i++ ) { + try { + classPool.appendClassPath( classpath[i] ); + } + catch ( NotFoundException e ) { + throw new HibernateException( + "Unable to resolve requested classpath for transformation [" + + classpath[i] + "] : " + e.getMessage() + ); + } + } + } + + protected Class findClass(String name) throws ClassNotFoundException { + try { + CtClass cc = classPool.get( name ); + // todo : modify the class definition if not already transformed... + byte[] b = cc.toBytecode(); + return defineClass( name, b, 0, b.length ); + } + catch ( NotFoundException e ) { + throw new ClassNotFoundException(); + } + catch ( IOException e ) { + throw new ClassNotFoundException(); + } + catch ( CannotCompileException e ) { + throw new ClassNotFoundException(); + } + } + + public void release() { + classPool = null; + parent = null; + } +} diff --git a/src/org/hibernate/bytecode/package.html b/src/org/hibernate/bytecode/package.html new file mode 100644 index 0000000000..f9c8121809 --- /dev/null +++ b/src/org/hibernate/bytecode/package.html @@ -0,0 +1,38 @@ + + + +

+ This package defines the API for plugging in bytecode libraries
+ for usage by Hibernate. Hibernate uses these bytecode libraries
+ in three scenarios:
+ 1. Reflection optimization - to speed up the performance of
+ POJO entity and component construction and field/property access.
+ 2. Proxy generation - runtime building of proxies used for
+ deferred loading of lazy entities.
+ 3. Field-level interception - build-time instrumentation of entity
+ classes for the purpose of intercepting field-level access (read/write)
+ for both lazy loading and dirty tracking.
+ Currently, both CGLIB and Javassist are supported out-of-the-box.
+ Note that for field-level interception, simply plugging in a new {@link BytecodeProvider}
+ is not enough for Hibernate to be able to recognize new providers. You would additionally
+ need to make appropriate code changes to the {@link org.hibernate.intercept.Helper}
+ class. This is because the detection of these enhanced classes is needed in a static
+ environment (i.e. outside the scope of any {@link org.hibernate.SessionFactory}).
+ Note that in its current form, the ability to specify a different bytecode provider
+ is considered a global setting (global to the JVM).
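+ A minimal selection sketch (illustrative; assumes the hibernate.bytecode.provider
+ property read by org.hibernate.cfg.Environment, with "javassist" naming the
+ provider implemented in org.hibernate.bytecode.javassist):
+ <pre>
+ // the choice is global to the JVM, so set it before Hibernate starts up
+ System.setProperty( "hibernate.bytecode.provider", "javassist" );
+ </pre>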

    + + diff --git a/src/org/hibernate/bytecode/util/BasicClassFilter.java b/src/org/hibernate/bytecode/util/BasicClassFilter.java new file mode 100644 index 0000000000..64e179aff7 --- /dev/null +++ b/src/org/hibernate/bytecode/util/BasicClassFilter.java @@ -0,0 +1,59 @@ +package org.hibernate.bytecode.util; + +import java.util.Set; +import java.util.HashSet; + +/** + * BasicClassFilter provides class filtering based on a series of packages to + * be included and/or a series of explicit class names to be included. If + * neither is specified, then no restrictions are applied. + * + * @author Steve Ebersole + */ +public class BasicClassFilter implements ClassFilter { + private final String[] includedPackages; + private final Set includedClassNames = new HashSet(); + private final boolean isAllEmpty; + + public BasicClassFilter() { + this( null, null ); + } + + public BasicClassFilter(String[] includedPackages, String[] includedClassNames) { + this.includedPackages = includedPackages; + if ( includedClassNames != null ) { + for ( int i = 0; i < includedClassNames.length; i++ ) { + this.includedClassNames.add( includedClassNames[i] ); + } + } + + isAllEmpty = ( this.includedPackages == null || this.includedPackages.length == 0 ) + && ( this.includedClassNames.isEmpty() ); + } + + public boolean shouldInstrumentClass(String className) { + if ( isAllEmpty ) { + return true; + } + else if ( includedClassNames.contains( className ) ) { + return true; + } + else if ( isInIncludedPackage( className ) ) { + return true; + } + else { + return false; + } + } + + private boolean isInIncludedPackage(String className) { + if ( includedPackages != null ) { + for ( int i = 0; i < includedPackages.length; i++ ) { + if ( className.startsWith( includedPackages[i] ) ) { + return true; + } + } + } + return false; + } +} diff --git a/src/org/hibernate/bytecode/util/ByteCodeHelper.java b/src/org/hibernate/bytecode/util/ByteCodeHelper.java new file mode 100644 index 0000000000..8ada73f7e9 --- /dev/null +++ b/src/org/hibernate/bytecode/util/ByteCodeHelper.java @@ -0,0 +1,78 @@ +package org.hibernate.bytecode.util; + +import java.io.InputStream; +import java.io.IOException; +import java.io.File; +import java.io.FileInputStream; +import java.io.ByteArrayOutputStream; +import java.io.BufferedInputStream; +import java.util.zip.ZipInputStream; + +/** + * A helper for reading byte code from various input sources. + * + * @author Steve Ebersole + */ +public class ByteCodeHelper { + private ByteCodeHelper() { + } + + /** + * Reads class byte array info from the given input stream. + *
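+ * <p>
+ * A usage sketch (the file name is hypothetical):
+ * <pre>
+ * byte[] bytecode = ByteCodeHelper.readByteCode( new FileInputStream( "Foo.class" ) );
+ * </pre>
+ *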

    + * The stream is closed within this method! + * + * @param inputStream + * @return + * @throws IOException + */ + public static byte[] readByteCode(InputStream inputStream) throws IOException { + if ( inputStream == null ) { + throw new IOException( "null input stream" ); + } + + byte[] buffer = new byte[409600]; + byte[] classBytes = new byte[0]; + int r = 0; + + try { + r = inputStream.read( buffer ); + while ( r >= buffer.length ) { + byte[] temp = new byte[ classBytes.length + buffer.length ]; + System.arraycopy( classBytes, 0, temp, 0, classBytes.length ); + System.arraycopy( buffer, 0, temp, classBytes.length, buffer.length ); + classBytes = temp; + } + if ( r != -1 ) { + byte[] temp = new byte[ classBytes.length + r ]; + System.arraycopy( classBytes, 0, temp, 0, classBytes.length ); + System.arraycopy( buffer, 0, temp, classBytes.length, r ); + classBytes = temp; + } + } + finally { + try { + inputStream.close(); + } + catch (IOException ignore) { + // intentionally empty + } + } + + return classBytes; + } + + public static byte[] readByteCode(File file) throws IOException { + return ByteCodeHelper.readByteCode( new FileInputStream( file ) ); + } + + public static byte[] readByteCode(ZipInputStream zip) throws IOException { + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + InputStream in = new BufferedInputStream( zip ); + int b; + while ( ( b = in.read() ) != -1 ) { + bout.write( b ); + } + return bout.toByteArray(); + } +} diff --git a/src/org/hibernate/bytecode/util/ClassDescriptor.java b/src/org/hibernate/bytecode/util/ClassDescriptor.java new file mode 100644 index 0000000000..5e2601aa4b --- /dev/null +++ b/src/org/hibernate/bytecode/util/ClassDescriptor.java @@ -0,0 +1,30 @@ +package org.hibernate.bytecode.util; + +/** + * Contract describing the information Hibernate needs in terms of instrumenting + * a class, either via ant task or dynamic classloader. + * + * @author Steve Ebersole + */ +public interface ClassDescriptor { + /** + * The name of the class. + * + * @return The class name. + */ + public String getName(); + + /** + * Determine if the class is already instrumented. + * + * @return True if already instrumented; false otherwise. + */ + public boolean isInstrumented(); + + /** + * The bytes making up the class' bytecode. + * + * @return The bytecode bytes. + */ + public byte[] getBytes(); +} diff --git a/src/org/hibernate/bytecode/util/ClassFilter.java b/src/org/hibernate/bytecode/util/ClassFilter.java new file mode 100644 index 0000000000..9418fe4bb1 --- /dev/null +++ b/src/org/hibernate/bytecode/util/ClassFilter.java @@ -0,0 +1,10 @@ +package org.hibernate.bytecode.util; + +/** + * Used to determine whether a class should be instrumented. + * + * @author Steve Ebersole + */ +public interface ClassFilter { + public boolean shouldInstrumentClass(String className); +} diff --git a/src/org/hibernate/bytecode/util/FieldFilter.java b/src/org/hibernate/bytecode/util/FieldFilter.java new file mode 100644 index 0000000000..6625120b1e --- /dev/null +++ b/src/org/hibernate/bytecode/util/FieldFilter.java @@ -0,0 +1,29 @@ +package org.hibernate.bytecode.util; + +/** + * Used to determine whether a field reference should be instrumented. + * + * @author Steve Ebersole + */ +public interface FieldFilter { + /** + * Should this field definition be instrumented? + * + * @param className The name of the class currently being processed + * @param fieldName The name of the field being checked. + * @return True if we should instrument this field. 
+ */ + public boolean shouldInstrumentField(String className, String fieldName); + + /** + * Should we instrument *access to* the given field. This differs from + * {@link #shouldInstrumentField} in that here we are talking about a particular usage of + * a field. + * + * @param transformingClassName The class currently being transformed. + * @param fieldOwnerClassName The name of the class owning this field being checked. + * @param fieldName The name of the field being checked. + * @return True if this access should be transformed. + */ + public boolean shouldTransformFieldAccess(String transformingClassName, String fieldOwnerClassName, String fieldName); +} diff --git a/src/org/hibernate/cache/AbstractJndiBoundCacheProvider.java b/src/org/hibernate/cache/AbstractJndiBoundCacheProvider.java new file mode 100644 index 0000000000..ba013736f1 --- /dev/null +++ b/src/org/hibernate/cache/AbstractJndiBoundCacheProvider.java @@ -0,0 +1,86 @@ +// $Id$ +package org.hibernate.cache; + +import java.util.Properties; + +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.naming.NamingException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.cfg.Environment; +import org.hibernate.util.NamingHelper; +import org.hibernate.util.StringHelper; + +/** + * Support for CacheProvider implementations which are backed by caches bound + * into JNDI namespace. + * + * @author Steve Ebersole + */ +public abstract class AbstractJndiBoundCacheProvider implements CacheProvider { + + private static final Log log = LogFactory.getLog( AbstractJndiBoundCacheProvider.class ); + private Object cache; + + protected void prepare(Properties properties) { + // Do nothing; subclasses may override. + } + + protected void release() { + // Do nothing; subclasses may override. + } + + /** + * Callback to perform any necessary initialization of the underlying cache implementation during SessionFactory + * construction. + * + * @param properties current configuration settings. + */ + public final void start(Properties properties) throws CacheException { + String jndiNamespace = properties.getProperty( Environment.CACHE_NAMESPACE ); + if ( StringHelper.isEmpty( jndiNamespace ) ) { + throw new CacheException( "No JNDI namespace specified for cache" ); + } + cache = locateCache( jndiNamespace, NamingHelper.getJndiProperties( properties ) ); + prepare( properties ); + } + + /** + * Callback to perform any necessary cleanup of the underlying cache + * implementation during SessionFactory.close(). 
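+ * <p>
+ * This is a template method: subclasses hook in via {@link #release()}, after
+ * which the cache reference itself is cleared.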
+ */ + public final void stop() { + release(); + cache = null; + } + + private Object locateCache(String jndiNamespace, Properties jndiProperties) { + + Context ctx = null; + try { + ctx = new InitialContext( jndiProperties ); + return ctx.lookup( jndiNamespace ); + } + catch (NamingException ne) { + String msg = "Unable to retrieve Cache from JNDI [" + jndiNamespace + "]"; + log.info( msg, ne ); + throw new CacheException( msg ); + } + finally { + if ( ctx != null ) { + try { + ctx.close(); + } + catch( NamingException ne ) { + log.info( "Unable to release initial context", ne ); + } + } + } + } + + public Object getCache() { + return cache; + } +} diff --git a/src/org/hibernate/cache/Cache.java b/src/org/hibernate/cache/Cache.java new file mode 100644 index 0000000000..186d00c674 --- /dev/null +++ b/src/org/hibernate/cache/Cache.java @@ -0,0 +1,106 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Map; + +/** + * Implementors define a caching algorithm. All implementors + * must be threadsafe. + */ +public interface Cache { + /** + * Get an item from the cache + * @param key + * @return the cached object or null + * @throws CacheException + */ + public Object read(Object key) throws CacheException; + /** + * Get an item from the cache, nontransactionally + * @param key + * @return the cached object or null + * @throws CacheException + */ + public Object get(Object key) throws CacheException; + /** + * Add an item to the cache, nontransactionally, with + * failfast semantics + * @param key + * @param value + * @throws CacheException + */ + public void put(Object key, Object value) throws CacheException; + /** + * Add an item to the cache + * @param key + * @param value + * @throws CacheException + */ + public void update(Object key, Object value) throws CacheException; + /** + * Remove an item from the cache + */ + public void remove(Object key) throws CacheException; + /** + * Clear the cache + */ + public void clear() throws CacheException; + /** + * Clean up + */ + public void destroy() throws CacheException; + /** + * If this is a clustered cache, lock the item + */ + public void lock(Object key) throws CacheException; + /** + * If this is a clustered cache, unlock the item + */ + public void unlock(Object key) throws CacheException; + /** + * Generate a timestamp + */ + public long nextTimestamp(); + /** + * Get a reasonable "lock timeout" + */ + public int getTimeout(); + + /** + * Get the name of the cache region + */ + public String getRegionName(); + + /** + * The number of bytes this cache region is currently consuming in memory. + * + * @return The number of bytes consumed by this region; -1 if unknown or + * unsupported. + */ + public long getSizeInMemory(); + + /** + * The count of entries currently contained in the region's in-memory store. + * + * @return The count of entries in memory; -1 if unknown or unsupported. + */ + public long getElementCountInMemory(); + + /** + * The count of entries currently contained in the region's disk store. + * + * @return The count of entries on disk; -1 if unknown or unsupported.
+ */ + public long getElementCountOnDisk(); + + /** + * optional operation + */ + public Map toMap(); +} + + + + + + diff --git a/src/org/hibernate/cache/CacheConcurrencyStrategy.java b/src/org/hibernate/cache/CacheConcurrencyStrategy.java new file mode 100644 index 0000000000..0870b8f50b --- /dev/null +++ b/src/org/hibernate/cache/CacheConcurrencyStrategy.java @@ -0,0 +1,177 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Comparator; + +/** + * Implementors manage transactional access to cached data. Transactions + * pass in a timestamp indicating transaction start time. Two different + * implementation patterns are provided for.

+ * <ul> + * <li>A transaction-aware cache implementation might be wrapped by a + * "synchronous" concurrency strategy, where updates to the cache are written + * to the cache inside the transaction.</li> + * <li>A non transaction-aware cache would be wrapped by an "asynchronous" + * concurrency strategy, where items are merely "soft locked" during the + * transaction and then updated during the "after transaction completion" + * phase; the soft lock is not an actual lock on the database row - + * only upon the cached representation of the item.</li> + * </ul> + * <p/> + * In terms of entity caches, the expected call sequences are: + * <ul> + * <li><b>DELETES</b> : {@link #lock} -> {@link #evict} -> {@link #release}</li> + * <li><b>UPDATES</b> : {@link #lock} -> {@link #update} -> {@link #afterUpdate}</li> + * <li><b>INSERTS</b> : {@link #insert} -> {@link #afterInsert}</li> + * <li><b>LOADS</b> : {@link #put}</li> + * </ul> + * <p/>

    + * In terms of collection caches, all modification actions actually just + * invalidate the entry(s). The call sequence here is: + * {@link #lock} -> {@link #evict} -> {@link #release} + *

+ * Note that, for an asynchronous cache, cache invalidation must be a two-step + * process (lock -> release, or lock -> afterUpdate), since this is the only + * way to guarantee consistency with the database for a nontransactional cache + * implementation. For a synchronous cache, cache invalidation is a single-step + * process (evict, or update). Hence, this interface defines a three-step + * process, to cater for both models. + *
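+ * <p/> + * For example, the UPDATES sequence above, sketched against an arbitrary + * strategy instance (the argument values here are placeholders supplied by the caller): + * <pre> + * SoftLock lock = strategy.lock( key, version ); // within the transaction + * strategy.update( key, newValue, newVersion, oldVersion ); // before completion + * strategy.afterUpdate( key, newValue, newVersion, lock ); // after completion + * </pre> + *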

+ * Note that query result caching does not go through a concurrency strategy; query + * results are managed directly against the underlying {@link Cache cache regions}. + */ +public interface CacheConcurrencyStrategy { + + /** + * Attempt to retrieve an object from the cache. Mainly used in attempting + * to resolve entities/collections from the second level cache. + * + * @param key + * @param txTimestamp a timestamp prior to the transaction start time + * @return the cached object or null + * @throws CacheException + */ + public Object get(Object key, long txTimestamp) throws CacheException; + + /** + * Attempt to cache an object, after loading from the database. + * + * @param key + * @param value + * @param txTimestamp a timestamp prior to the transaction start time + * @param version the item version number + * @param versionComparator a comparator used to compare version numbers + * @param minimalPut indicates that the cache should avoid a put if the item is already cached + * @return true if the object was successfully cached + * @throws CacheException + */ + public boolean put( + Object key, + Object value, + long txTimestamp, + Object version, + Comparator versionComparator, + boolean minimalPut) + throws CacheException; + + /** + * We are going to attempt to update/delete the keyed object. This + * method is used by "asynchronous" concurrency strategies. + *

    + * The returned object must be passed back to release(), to release the + * lock. Concurrency strategies which do not support client-visible + * locks may silently return null. + * + * @param key + * @param version + * @throws CacheException + */ + public SoftLock lock(Object key, Object version) throws CacheException; + + /** + * Called after an item has become stale (before the transaction completes). + * This method is used by "synchronous" concurrency strategies. + */ + public void evict(Object key) throws CacheException; + + /** + * Called after an item has been updated (before the transaction completes), + * instead of calling evict(). + * This method is used by "synchronous" concurrency strategies. + */ + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) throws CacheException; + + /** + * Called after an item has been inserted (before the transaction completes), + * instead of calling evict(). + * This method is used by "synchronous" concurrency strategies. + */ + public boolean insert(Object key, Object value, Object currentVersion) throws CacheException; + + + /** + * Called when we have finished the attempted update/delete (which may or + * may not have been successful), after transaction completion. + * This method is used by "asynchronous" concurrency strategies. + * @param key + * @throws CacheException + */ + public void release(Object key, SoftLock lock) throws CacheException; + /** + * Called after an item has been updated (after the transaction completes), + * instead of calling release(). + * This method is used by "asynchronous" concurrency strategies. + */ + public boolean afterUpdate(Object key, Object value, Object version, SoftLock lock) + throws CacheException; + /** + * Called after an item has been inserted (after the transaction completes), + * instead of calling release(). + * This method is used by "asynchronous" concurrency strategies. + */ + public boolean afterInsert(Object key, Object value, Object version) + throws CacheException; + + + /** + * Evict an item from the cache immediately (without regard for transaction + * isolation). + * @param key + * @throws CacheException + */ + public void remove(Object key) throws CacheException; + /** + * Evict all items from the cache immediately. + * @throws CacheException + */ + public void clear() throws CacheException; + /** + * Clean up all resources. + */ + public void destroy(); + /** + * Set the underlying cache implementation. + * @param cache + */ + public void setCache(Cache cache); + + /** + * Marker interface, denoting a client-visible "soft lock" + * on a cached item. 
+ * @author Gavin King + */ + public static interface SoftLock {} + + /** + * Get the cache region name + */ + public String getRegionName(); + + /** + * Get the wrapped cache implementation + */ + public Cache getCache(); +} + + + + + + diff --git a/src/org/hibernate/cache/CacheException.java b/src/org/hibernate/cache/CacheException.java new file mode 100644 index 0000000000..3540974b9b --- /dev/null +++ b/src/org/hibernate/cache/CacheException.java @@ -0,0 +1,23 @@ +//$Id$ +package org.hibernate.cache; + +import org.hibernate.HibernateException; + +/** + * Something went wrong in the cache + */ +public class CacheException extends HibernateException { + + public CacheException(String s) { + super(s); + } + + public CacheException(String s, Throwable e) { + super(s, e); + } + + public CacheException(Throwable e) { + super(e); + } + +} diff --git a/src/org/hibernate/cache/CacheFactory.java b/src/org/hibernate/cache/CacheFactory.java new file mode 100644 index 0000000000..52bf67236a --- /dev/null +++ b/src/org/hibernate/cache/CacheFactory.java @@ -0,0 +1,71 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Properties; + +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.cfg.Settings; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * @author Gavin King + */ +public final class CacheFactory { + + private static final Log log = LogFactory.getLog(CacheFactory.class); + + private CacheFactory() {} + + public static final String READ_ONLY = "read-only"; + public static final String READ_WRITE = "read-write"; + public static final String NONSTRICT_READ_WRITE = "nonstrict-read-write"; + public static final String TRANSACTIONAL = "transactional"; + + public static CacheConcurrencyStrategy createCache( + final String concurrencyStrategy, + String regionName, + final boolean mutable, + final Settings settings, + final Properties properties) + throws HibernateException { + + if ( concurrencyStrategy==null || !settings.isSecondLevelCacheEnabled() ) return null; //no cache + + String prefix = settings.getCacheRegionPrefix(); + if ( prefix!=null ) regionName = prefix + '.' 
+ regionName; + + if ( log.isDebugEnabled() ) log.debug("instantiating cache region: " + regionName + " usage strategy: " + concurrencyStrategy); + + final CacheConcurrencyStrategy ccs; + if ( concurrencyStrategy.equals(READ_ONLY) ) { + if (mutable) log.warn( "read-only cache configured for mutable class: " + regionName ); + ccs = new ReadOnlyCache(); + } + else if ( concurrencyStrategy.equals(READ_WRITE) ) { + ccs = new ReadWriteCache(); + } + else if ( concurrencyStrategy.equals(NONSTRICT_READ_WRITE) ) { + ccs = new NonstrictReadWriteCache(); + } + else if ( concurrencyStrategy.equals(TRANSACTIONAL) ) { + ccs = new TransactionalCache(); + } + else { + throw new MappingException("cache usage attribute should be read-write, read-only, nonstrict-read-write or transactional"); + } + + final Cache impl; + try { + impl = settings.getCacheProvider().buildCache(regionName, properties); + } + catch (CacheException e) { + throw new HibernateException( "Could not instantiate cache implementation", e ); + } + ccs.setCache(impl); + + return ccs; + } + +} diff --git a/src/org/hibernate/cache/CacheKey.java b/src/org/hibernate/cache/CacheKey.java new file mode 100755 index 0000000000..274915487d --- /dev/null +++ b/src/org/hibernate/cache/CacheKey.java @@ -0,0 +1,72 @@ +//$Id$ +package org.hibernate.cache; + +import java.io.Serializable; + +import org.hibernate.EntityMode; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Allows multiple entity classes / collection roles to be + * stored in the same cache region. Also allows for composite + * keys which do not properly implement equals()/hashCode(). + * + * @author Gavin King + */ +public class CacheKey implements Serializable { + private final Serializable key; + private final Type type; + private final String entityOrRoleName; + private final EntityMode entityMode; + private final int hashCode; + + /** + * Construct a new key for a collection or entity instance. + * Note that an entity name should always be the root entity + * name, not a subclass entity name. + * + * @param id The identifier associated with the cached data + * @param type The Hibernate type mapping + * @param entityOrRoleName The entity or collection-role name. 
+ * @param entityMode The entity mode of the originating session + * @param factory The session factory for which we are caching + */ + public CacheKey( + final Serializable id, + final Type type, + final String entityOrRoleName, + final EntityMode entityMode, + final SessionFactoryImplementor factory) { + this.key = id; + this.type = type; + this.entityOrRoleName = entityOrRoleName; + this.entityMode = entityMode; + hashCode = type.getHashCode( key, entityMode, factory ); + } + + //Mainly for OSCache + public String toString() { + return entityOrRoleName + '#' + key.toString();//"CacheKey#" + type.toString(key, sf); + } + + public boolean equals(Object other) { + if ( !(other instanceof CacheKey) ) return false; + CacheKey that = (CacheKey) other; + return entityOrRoleName.equals( that.entityOrRoleName ) + && type.isEqual( key, that.key, entityMode ); + } + + public int hashCode() { + return hashCode; + } + + public Serializable getKey() { + return key; + } + + public String getEntityOrRoleName() { + return entityOrRoleName; + } + +} diff --git a/src/org/hibernate/cache/CacheProvider.java b/src/org/hibernate/cache/CacheProvider.java new file mode 100644 index 0000000000..bbfb7def54 --- /dev/null +++ b/src/org/hibernate/cache/CacheProvider.java @@ -0,0 +1,43 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Properties; + +/** + * Support for pluggable caches. + * + * @author Gavin King + */ +public interface CacheProvider { + + /** + * Configure the cache + * + * @param regionName the name of the cache region + * @param properties configuration settings + * @throws CacheException + */ + public Cache buildCache(String regionName, Properties properties) throws CacheException; + + /** + * Generate a timestamp + */ + public long nextTimestamp(); + + /** + * Callback to perform any necessary initialization of the underlying cache implementation + * during SessionFactory construction. + * + * @param properties current configuration settings. + */ + public void start(Properties properties) throws CacheException; + + /** + * Callback to perform any necessary cleanup of the underlying cache implementation + * during SessionFactory.close(). + */ + public void stop(); + + public boolean isMinimalPutsEnabledByDefault(); + +} diff --git a/src/org/hibernate/cache/EhCache.java b/src/org/hibernate/cache/EhCache.java new file mode 100644 index 0000000000..981741810c --- /dev/null +++ b/src/org/hibernate/cache/EhCache.java @@ -0,0 +1,275 @@ +//$Id$ +/** + * Copyright 2003-2006 Greg Luck, Jboss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.hibernate.cache; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import net.sf.ehcache.CacheManager; +import net.sf.ehcache.Element; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * EHCache plugin for Hibernate + *

+ * EHCache uses a {@link net.sf.ehcache.store.MemoryStore} and a + * {@link net.sf.ehcache.store.DiskStore}. + * The {@link net.sf.ehcache.store.DiskStore} requires that both keys and values be {@link java.io.Serializable}. + * However the MemoryStore does not, and in ehcache-1.2 non-Serializable objects are permitted. They are discarded + * if an attempt is made to overflow them to Disk or to replicate them to remote cache peers. + * + * @author Greg Luck + * @author Emmanuel Bernard + */ +public class EhCache implements Cache { + private static final Log log = LogFactory.getLog( EhCache.class ); + + private static final int SIXTY_THOUSAND_MS = 60000; + + private net.sf.ehcache.Cache cache; + + /** + * Creates a new Hibernate pluggable cache backed by the given EhCache instance. + *

    + * + * @param cache The underlying EhCache instance to use. + */ + public EhCache(net.sf.ehcache.Cache cache) { + this.cache = cache; + } + + /** + * Gets a value of an element which matches the given key. + * + * @param key the key of the element to return. + * @return The value placed into the cache with an earlier put, or null if not found or expired + * @throws CacheException + */ + public Object get(Object key) throws CacheException { + try { + if ( log.isDebugEnabled() ) { + log.debug( "key: " + key ); + } + if ( key == null ) { + return null; + } + else { + Element element = cache.get( key ); + if ( element == null ) { + if ( log.isDebugEnabled() ) { + log.debug( "Element for " + key + " is null" ); + } + return null; + } + else { + return element.getObjectValue(); + } + } + } + catch (net.sf.ehcache.CacheException e) { + throw new CacheException( e ); + } + } + + public Object read(Object key) throws CacheException { + return get( key ); + } + + + /** + * Puts an object into the cache. + * + * @param key a key + * @param value a value + * @throws CacheException if the {@link CacheManager} + * is shutdown or another {@link Exception} occurs. + */ + public void update(Object key, Object value) throws CacheException { + put( key, value ); + } + + /** + * Puts an object into the cache. + * + * @param key a key + * @param value a value + * @throws CacheException if the {@link CacheManager} + * is shutdown or another {@link Exception} occurs. + */ + public void put(Object key, Object value) throws CacheException { + try { + Element element = new Element( key, value ); + cache.put( element ); + } + catch (IllegalArgumentException e) { + throw new CacheException( e ); + } + catch (IllegalStateException e) { + throw new CacheException( e ); + } + catch (net.sf.ehcache.CacheException e) { + throw new CacheException( e ); + } + + } + + /** + * Removes the element which matches the key. + *

+ * If no element matches, nothing is removed and no Exception is thrown. + * + * @param key the key of the element to remove + * @throws CacheException + */ + public void remove(Object key) throws CacheException { + try { + cache.remove( key ); + } + catch (ClassCastException e) { + throw new CacheException( e ); + } + catch (IllegalStateException e) { + throw new CacheException( e ); + } + catch (net.sf.ehcache.CacheException e) { + throw new CacheException( e ); + } + } + + /** + * Remove all elements in the cache, but leave the cache + * in a usable state. + * + * @throws CacheException + */ + public void clear() throws CacheException { + try { + cache.removeAll(); + } + catch (IllegalStateException e) { + throw new CacheException( e ); + } + catch (net.sf.ehcache.CacheException e) { + throw new CacheException( e ); + } + } + + /** + * Remove the cache and make it unusable. + * + * @throws CacheException + */ + public void destroy() throws CacheException { + try { + cache.getCacheManager().removeCache( cache.getName() ); + } + catch (IllegalStateException e) { + throw new CacheException( e ); + } + catch (net.sf.ehcache.CacheException e) { + throw new CacheException( e ); + } + } + + /** + * Callers of this method should perform their own synchronization. + * It is provided for distributed caches. Because EHCache is not distributed + * this method does nothing. + */ + public void lock(Object key) throws CacheException { + } + + /** + * Callers of this method should perform their own synchronization. + * It is provided for distributed caches. Because EHCache is not distributed + * this method does nothing. + */ + public void unlock(Object key) throws CacheException { + } + + /** + * Gets the next timestamp. + */ + public long nextTimestamp() { + return Timestamper.next(); + } + + /** + * Returns the lock timeout for this cache. + */ + public int getTimeout() { + // 60 second lock timeout + return Timestamper.ONE_MS * SIXTY_THOUSAND_MS; + } + + public String getRegionName() { + return cache.getName(); + } + + /** + * Warning: This method can be very expensive to run. Allow approximately 1 second + * per 1MB of entries. Running this method could create liveness problems + * because the object lock is held for a long period + *

+ * + * @return the approximate size of memory ehcache is using for the MemoryStore for this cache + */ + public long getSizeInMemory() { + try { + return cache.calculateInMemorySize(); + } + catch (Throwable t) { + return -1; + } + } + + public long getElementCountInMemory() { + try { + return cache.getMemoryStoreSize(); + } + catch (net.sf.ehcache.CacheException ce) { + throw new CacheException( ce ); + } + } + + public long getElementCountOnDisk() { + return cache.getDiskStoreSize(); + } + + public Map toMap() { + try { + Map result = new HashMap(); + Iterator iter = cache.getKeys().iterator(); + while ( iter.hasNext() ) { + Object key = iter.next(); + result.put( key, cache.get( key ).getObjectValue() ); + } + return result; + } + catch (Exception e) { + throw new CacheException( e ); + } + } + + public String toString() { + return "EHCache(" + getRegionName() + ')'; + } + +} \ No newline at end of file diff --git a/src/org/hibernate/cache/EhCacheProvider.java b/src/org/hibernate/cache/EhCacheProvider.java new file mode 100644 index 0000000000..7258fa98f9 --- /dev/null +++ b/src/org/hibernate/cache/EhCacheProvider.java @@ -0,0 +1,167 @@ +//$Id$ +/** + * Copyright 2003-2006 Greg Luck, Jboss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.hibernate.cache; + +import java.util.Properties; +import java.net.URL; + +import net.sf.ehcache.CacheManager; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.cfg.Environment; +import org.hibernate.util.StringHelper; +import org.hibernate.util.ConfigHelper; + +/** + * Cache Provider plugin for Hibernate + * + * Use hibernate.cache.provider_class=org.hibernate.cache.EhCacheProvider + * in Hibernate 3.x or later + * + * Taken from EhCache 0.9 distribution + * @author Greg Luck + * @author Emmanuel Bernard + */ +/** + * Cache Provider plugin for ehcache-1.2. New in this provider are ehcache support for multiple + * Hibernate session factories, each with its own ehcache configuration, and non Serializable keys and values. + * Ehcache-1.2 also has many other features such as cluster support and listeners, which can be used seamlessly, simply + * by configuration in ehcache.xml. + *

    + * Use hibernate.cache.provider_class=org.hibernate.cache.EhCacheProvider in the Hibernate configuration + * to enable this provider for Hibernate's second level cache. + *

    + * When configuring multiple ehcache CacheManagers, as you would where you have multiple Hibernate Configurations and + * multiple SessionFactories, specify in each Hibernate configuration the ehcache configuration using + * the property hibernate.cache.provider_configuration_file_resource_path An example to set an ehcache configuration + * called ehcache-2.xml would be hibernate.cache.provider_configuration_file_resource_path=/ehcache-2.xml. If the leading + * slash is not there one will be added. The configuration file will be looked for in the root of the classpath. + *
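+ * <p/> + * For example (the file name here is arbitrary): + * <pre> + * hibernate.cache.provider_class=org.hibernate.cache.EhCacheProvider + * hibernate.cache.provider_configuration_file_resource_path=/ehcache-2.xml + * </pre> + *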

    + * Updated for ehcache-1.2. Note this provider requires ehcache-1.2.jar. Make sure ehcache-1.1.jar or earlier + * is not in the classpath or it will not work. + *

    + * See http://ehcache.sf.net for documentation on ehcache + *

    + * + * @author Greg Luck + * @author Emmanuel Bernard + */ +public class EhCacheProvider implements CacheProvider { + + private static final Log log = LogFactory.getLog(EhCacheProvider.class); + + private CacheManager manager; + + /** + * Builds a Cache. + *

+ * Even though this method provides properties, they are not used. + * Properties for EHCache are specified in the ehcache.xml file. + * Configuration will be read from ehcache.xml for a cache declaration + * where the name attribute matches the name parameter in this builder. + * + * @param name the name of the cache. Must match a cache configured in ehcache.xml + * @param properties not used + * @return the newly built and initialised cache + * @throws CacheException inter alia, if a cache of the same name already exists + */ + public Cache buildCache(String name, Properties properties) throws CacheException { + try { + net.sf.ehcache.Cache cache = manager.getCache(name); + if (cache == null) { + log.warn("Could not find configuration [" + name + "]; using defaults."); + manager.addCache(name); + cache = manager.getCache(name); + log.debug("started EHCache region: " + name); + } + return new EhCache(cache); + } + catch (net.sf.ehcache.CacheException e) { + throw new CacheException(e); + } + } + + /** + * Returns the next timestamp. + */ + public long nextTimestamp() { + return Timestamper.next(); + } + + /** + * Callback to perform any necessary initialization of the underlying cache implementation + * during SessionFactory construction. + * + * @param properties current configuration settings. + */ + public void start(Properties properties) throws CacheException { + if (manager != null) { + log.warn("Attempt to restart an already started EhCacheProvider. Use sessionFactory.close() " + + " between repeated calls to buildSessionFactory. Using previously created EhCacheProvider." + + " If this behaviour is required, consider using net.sf.ehcache.hibernate.SingletonEhCacheProvider."); + return; + } + try { + String configurationResourceName = null; + if (properties != null) { + configurationResourceName = (String) properties.get( Environment.CACHE_PROVIDER_CONFIG ); + } + if ( StringHelper.isEmpty( configurationResourceName ) ) { + manager = new CacheManager(); + } else { + URL url = loadResource(configurationResourceName); + manager = new CacheManager(url); + } + } catch (net.sf.ehcache.CacheException e) { + //yukky! Don't you have subclasses for that! + //TODO race conditions can happen here + if (e.getMessage().startsWith("Cannot parseConfiguration CacheManager. Attempt to create a new instance of " + + "CacheManager using the diskStorePath")) { + throw new CacheException("Attempt to restart an already started EhCacheProvider. Use sessionFactory.close() " + + " between repeated calls to buildSessionFactory. Consider using net.sf.ehcache.hibernate.SingletonEhCacheProvider." + , e ); + } else { + throw e; + } + } + } + + private URL loadResource(String configurationResourceName) { + URL url = ConfigHelper.locateConfig( configurationResourceName ); + if (log.isDebugEnabled()) { + log.debug("Creating EhCacheProvider from a specified resource: " + + configurationResourceName + " Resolved to URL: " + url); + } + return url; + } + + /** + * Callback to perform any necessary cleanup of the underlying cache implementation + * during SessionFactory.close().
+ */ + public void stop() { + if (manager != null) { + manager.shutdown(); + manager = null; + } + } + + public boolean isMinimalPutsEnabledByDefault() { + return false; + } + +} diff --git a/src/org/hibernate/cache/FilterKey.java b/src/org/hibernate/cache/FilterKey.java new file mode 100755 index 0000000000..15d6b26423 --- /dev/null +++ b/src/org/hibernate/cache/FilterKey.java @@ -0,0 +1,70 @@ +//$Id$ +package org.hibernate.cache; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +import org.hibernate.EntityMode; +import org.hibernate.engine.TypedValue; +import org.hibernate.impl.FilterImpl; +import org.hibernate.type.Type; + +/** + * Allows cached queries to be keyed by enabled filters. + * + * @author Gavin King + */ +public final class FilterKey implements Serializable { + private String filterName; + private Map filterParameters = new HashMap(); + + public FilterKey(String name, Map params, Map types, EntityMode entityMode) { + filterName = name; + Iterator iter = params.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry me = (Map.Entry) iter.next(); + Type type = (Type) types.get( me.getKey() ); + filterParameters.put( me.getKey(), new TypedValue( type, me.getValue(), entityMode ) ); + } + } + + public int hashCode() { + int result = 13; + result = 37 * result + filterName.hashCode(); + result = 37 * result + filterParameters.hashCode(); + return result; + } + + public boolean equals(Object other) { + if ( !(other instanceof FilterKey) ) return false; + FilterKey that = (FilterKey) other; + if ( !that.filterName.equals(filterName) ) return false; + if ( !that.filterParameters.equals(filterParameters) ) return false; + return true; + } + + public String toString() { + return "FilterKey[" + filterName + filterParameters + ']'; + } + + public static Set createFilterKeys(Map enabledFilters, EntityMode entityMode) { + if ( enabledFilters.size()==0 ) return null; + Set result = new HashSet(); + Iterator iter = enabledFilters.values().iterator(); + while ( iter.hasNext() ) { + FilterImpl filter = (FilterImpl) iter.next(); + FilterKey key = new FilterKey( + filter.getName(), + filter.getParameters(), + filter.getFilterDefinition().getParameterTypes(), + entityMode + ); + result.add(key); + } + return result; + } +} diff --git a/src/org/hibernate/cache/HashtableCache.java b/src/org/hibernate/cache/HashtableCache.java new file mode 100644 index 0000000000..1cc65cc01a --- /dev/null +++ b/src/org/hibernate/cache/HashtableCache.java @@ -0,0 +1,90 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Collections; +import java.util.Hashtable; + +import java.util.Map; + +/** + * A lightweight implementation of the Cache interface + * @author Gavin King + */ +public class HashtableCache implements Cache { + + private final Map hashtable = new Hashtable(); + private final String regionName; + + public HashtableCache(String regionName) { + this.regionName = regionName; + } + + public String getRegionName() { + return regionName; + } + + public Object read(Object key) throws CacheException { + return hashtable.get(key); + } + + public Object get(Object key) throws CacheException { + return hashtable.get(key); + } + + public void update(Object key, Object value) throws CacheException { + put(key, value); + } + + public void put(Object key, Object value) throws CacheException { + hashtable.put(key, value); + } + + public void remove(Object key) throws CacheException { + 
hashtable.remove(key); + } + + public void clear() throws CacheException { + hashtable.clear(); + } + + public void destroy() throws CacheException { + + } + + public void lock(Object key) throws CacheException { + // local cache, so we use synchronization + } + + public void unlock(Object key) throws CacheException { + // local cache, so we use synchronization + } + + public long nextTimestamp() { + return Timestamper.next(); + } + + public int getTimeout() { + return Timestamper.ONE_MS * 60000; //ie. 60 seconds + } + + public long getSizeInMemory() { + return -1; + } + + public long getElementCountInMemory() { + return hashtable.size(); + } + + public long getElementCountOnDisk() { + return 0; + } + + public Map toMap() { + return Collections.unmodifiableMap(hashtable); + } + + public String toString() { + return "HashtableCache(" + regionName + ')'; + } + +} diff --git a/src/org/hibernate/cache/HashtableCacheProvider.java b/src/org/hibernate/cache/HashtableCacheProvider.java new file mode 100644 index 0000000000..13ac176983 --- /dev/null +++ b/src/org/hibernate/cache/HashtableCacheProvider.java @@ -0,0 +1,42 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Properties; + +/** + * A simple in-memory Hashtable-based cache impl. + * + * @author Gavin King + */ +public class HashtableCacheProvider implements CacheProvider { + + public Cache buildCache(String regionName, Properties properties) throws CacheException { + return new HashtableCache( regionName ); + } + + public long nextTimestamp() { + return Timestamper.next(); + } + + /** + * Callback to perform any necessary initialization of the underlying cache implementation + * during SessionFactory construction. + * + * @param properties current configuration settings. + */ + public void start(Properties properties) throws CacheException { + } + + /** + * Callback to perform any necessary cleanup of the underlying cache implementation + * during SessionFactory.close(). + */ + public void stop() { + } + + public boolean isMinimalPutsEnabledByDefault() { + return false; + } + +} + diff --git a/src/org/hibernate/cache/JndiBoundTreeCacheProvider.java b/src/org/hibernate/cache/JndiBoundTreeCacheProvider.java new file mode 100644 index 0000000000..cda5dd5111 --- /dev/null +++ b/src/org/hibernate/cache/JndiBoundTreeCacheProvider.java @@ -0,0 +1,63 @@ +// $Id$ +package org.hibernate.cache; + +import java.util.Properties; + +import javax.transaction.TransactionManager; + +import org.hibernate.transaction.TransactionManagerLookup; +import org.hibernate.transaction.TransactionManagerLookupFactory; + +/** + * Support for JBossCache (TreeCache), where the cache instance is available + * via JNDI lookup. + * + * @author Steve Ebersole + */ +public class JndiBoundTreeCacheProvider extends AbstractJndiBoundCacheProvider { + + private TransactionManager transactionManager; + + /** + * Construct a Cache representing the "region" within the underlying cache + * provider.
+ * + * @param regionName the name of the cache region + * @param properties configuration settings + * + * @throws CacheException + */ + public Cache buildCache(String regionName, Properties properties) throws CacheException { + return new TreeCache( getTreeCacheInstance(), regionName, transactionManager ); + } + + public void prepare(Properties properties) throws CacheException { + TransactionManagerLookup transactionManagerLookup = TransactionManagerLookupFactory.getTransactionManagerLookup(properties); + if (transactionManagerLookup!=null) { + transactionManager = transactionManagerLookup.getTransactionManager(properties); + } + } + /** + * Generate a timestamp + */ + public long nextTimestamp() { + return System.currentTimeMillis() / 100; + } + + /** + * By default, should minimal-puts mode be enabled when using this cache. + *

+ * Since TreeCache is a clusterable cache and we are only getting a + * reference to the instance from JNDI, it is safest to assume a clustered + * setup and return true here. + * + * @return True. + */ + public boolean isMinimalPutsEnabledByDefault() { + return true; + } + + public org.jboss.cache.TreeCache getTreeCacheInstance() { + return ( org.jboss.cache.TreeCache ) super.getCache(); + } +} diff --git a/src/org/hibernate/cache/NoCacheProvider.java b/src/org/hibernate/cache/NoCacheProvider.java new file mode 100644 index 0000000000..df77e55569 --- /dev/null +++ b/src/org/hibernate/cache/NoCacheProvider.java @@ -0,0 +1,58 @@ +// $Id$ +package org.hibernate.cache; + +import java.util.Properties; + +/** + * A no-op CacheProvider, for use when second-level caching is disabled. + * + * @author Steve Ebersole + */ +public class NoCacheProvider implements CacheProvider { + /** + * Configure the cache + * + * @param regionName the name of the cache region + * @param properties configuration settings + * + * @throws CacheException + */ + public Cache buildCache(String regionName, Properties properties) throws CacheException { + throw new NoCachingEnabledException(); + } + + /** + * Generate a timestamp + */ + public long nextTimestamp() { + // This is used by SessionFactoryImpl to hand to the generated SessionImpl; + // it was the only reason I could see that we cannot just use null as + // Settings.cacheProvider + return System.currentTimeMillis() / 100; + } + + /** + * Callback to perform any necessary initialization of the underlying cache implementation during SessionFactory + * construction. + * + * @param properties current configuration settings. + */ + public void start(Properties properties) throws CacheException { + // this is called by SessionFactory regardless; we just disregard here; + // could also add a check to SessionFactory to only conditionally call start + } + + /** + * Callback to perform any necessary cleanup of the underlying cache implementation during SessionFactory.close(). + */ + public void stop() { + // this is called by SessionFactory regardless; we just disregard here; + // could also add a check to SessionFactory to only conditionally call stop + } + + public boolean isMinimalPutsEnabledByDefault() { + // this is called from SettingsFactory regardless; trivial to simply disregard + return false; + } + +} diff --git a/src/org/hibernate/cache/NoCachingEnabledException.java b/src/org/hibernate/cache/NoCachingEnabledException.java new file mode 100644 index 0000000000..6b713d8653 --- /dev/null +++ b/src/org/hibernate/cache/NoCachingEnabledException.java @@ -0,0 +1,20 @@ +// $Id$ +package org.hibernate.cache; + +import org.hibernate.cfg.Environment; + +/** + * Exception thrown when a cache is requested but second-level caching is not enabled.
+ * + * @author Steve Ebersole + */ +public class NoCachingEnabledException extends CacheException { + private static final String MSG = + "Second-level cache is not enabled for usage [" + + Environment.USE_SECOND_LEVEL_CACHE + + " | " + Environment.USE_QUERY_CACHE + "]"; + + public NoCachingEnabledException() { + super( MSG ); + } +} diff --git a/src/org/hibernate/cache/NonstrictReadWriteCache.java b/src/org/hibernate/cache/NonstrictReadWriteCache.java new file mode 100644 index 0000000000..1cc71080f7 --- /dev/null +++ b/src/org/hibernate/cache/NonstrictReadWriteCache.java @@ -0,0 +1,170 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Comparator; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Caches data that is sometimes updated without ever locking the cache. + * If concurrent access to an item is possible, this concurrency strategy + * makes no guarantee that the item returned from the cache is the latest + * version available in the database. Configure your cache timeout accordingly! + * This is an "asynchronous" concurrency strategy. + * + * @author Gavin King + * @see ReadWriteCache for a much stricter algorithm + */ +public class NonstrictReadWriteCache implements CacheConcurrencyStrategy { + + private Cache cache; + + private static final Log log = LogFactory.getLog( NonstrictReadWriteCache.class ); + + public NonstrictReadWriteCache() { + } + + public void setCache(Cache cache) { + this.cache = cache; + } + + public Cache getCache() { + return cache; + } + + /** + * Get the most recent version, if available. + */ + public Object get(Object key, long txTimestamp) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "Cache lookup: " + key ); + } + + Object result = cache.get( key ); + if ( result != null ) { + log.debug( "Cache hit" ); + } + else { + log.debug( "Cache miss" ); + } + return result; + } + + /** + * Add an item to the cache. + */ + public boolean put( + Object key, + Object value, + long txTimestamp, + Object version, + Comparator versionComparator, + boolean minimalPut) throws CacheException { + if ( minimalPut && cache.get( key ) != null ) { + if ( log.isDebugEnabled() ) { + log.debug( "item already cached: " + key ); + } + return false; + } + if ( log.isDebugEnabled() ) { + log.debug( "Caching: " + key ); + } + + cache.put( key, value ); + return true; + + } + + /** + * Do nothing. + * + * @return null, no lock + */ + public SoftLock lock(Object key, Object version) throws CacheException { + return null; + } + + public void remove(Object key) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "Removing: " + key ); + } + cache.remove( key ); + } + + public void clear() throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "Clearing" ); + } + cache.clear(); + } + + public void destroy() { + try { + cache.destroy(); + } + catch ( Exception e ) { + log.warn( "could not destroy cache", e ); + } + } + + /** + * Invalidate the item + */ + public void evict(Object key) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "Invalidating: " + key ); + } + + cache.remove( key ); + } + + /** + * Do nothing. + */ + public boolean insert(Object key, Object value, Object currentVersion) { + return false; + } + + /** + * Invalidate the item. + */ + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) { + evict( key ); + return false; + } + + /** + * Invalidate the item (again, for safety).
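+ * (Invoked after transaction completion, as the final step of the asynchronous + * invalidation sequence described in {@link CacheConcurrencyStrategy}.)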
+ */ + public void release(Object key, SoftLock lock) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "Invalidating (again): " + key ); + } + + cache.remove( key ); + } + + /** + * Invalidate the item (again, for safety). + */ + public boolean afterUpdate(Object key, Object value, Object version, SoftLock lock) throws CacheException { + release( key, lock ); + return false; + } + + /** + * Do nothing. + */ + public boolean afterInsert(Object key, Object value, Object version) throws CacheException { + return false; + } + + public String getRegionName() { + return cache.getRegionName(); + } + + public String toString() { + return cache + "(nonstrict-read-write)"; + } +} diff --git a/src/org/hibernate/cache/OSCache.java b/src/org/hibernate/cache/OSCache.java new file mode 100644 index 0000000000..2802482aa8 --- /dev/null +++ b/src/org/hibernate/cache/OSCache.java @@ -0,0 +1,111 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Map; + +import com.opensymphony.oscache.base.NeedsRefreshException; +import com.opensymphony.oscache.general.GeneralCacheAdministrator; + +/** + * @author Mathias Bogaert + */ +public class OSCache implements Cache { + + /** + * The OSCache 2.0 cache administrator. + */ + private GeneralCacheAdministrator cache = new GeneralCacheAdministrator(); + + private final int refreshPeriod; + private final String cron; + private final String regionName; + + private String toString(Object key) { + return String.valueOf(key) + '.' + regionName; + } + + public OSCache(int refreshPeriod, String cron, String region) { + this.refreshPeriod = refreshPeriod; + this.cron = cron; + this.regionName = region; + } + + public void setCacheCapacity(int cacheCapacity) { + cache.setCacheCapacity(cacheCapacity); + } + + public Object get(Object key) throws CacheException { + try { + return cache.getFromCache( toString(key), refreshPeriod, cron ); + } + catch (NeedsRefreshException e) { + cache.cancelUpdate( toString(key) ); + return null; + } + } + + public Object read(Object key) throws CacheException { + return get(key); + } + + public void update(Object key, Object value) throws CacheException { + put(key, value); + } + + public void put(Object key, Object value) throws CacheException { + cache.putInCache( toString(key), value ); + } + + public void remove(Object key) throws CacheException { + cache.flushEntry( toString(key) ); + } + + public void clear() throws CacheException { + cache.flushAll(); + } + + public void destroy() throws CacheException { + cache.destroy(); + } + + public void lock(Object key) throws CacheException { + // local cache, so we use synchronization + } + + public void unlock(Object key) throws CacheException { + // local cache, so we use synchronization + } + + public long nextTimestamp() { + return Timestamper.next(); + } + + public int getTimeout() { + return Timestamper.ONE_MS * 60000; //ie. 
60 seconds + } + + public String getRegionName() { + return regionName; + } + + public long getSizeInMemory() { + return -1; + } + + public long getElementCountInMemory() { + return -1; + } + + public long getElementCountOnDisk() { + return -1; + } + + public Map toMap() { + throw new UnsupportedOperationException(); + } + + public String toString() { + return "OSCache(" + regionName + ')'; + } + +} diff --git a/src/org/hibernate/cache/OSCacheProvider.java b/src/org/hibernate/cache/OSCacheProvider.java new file mode 100644 index 0000000000..ca3cf80ca8 --- /dev/null +++ b/src/org/hibernate/cache/OSCacheProvider.java @@ -0,0 +1,87 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Properties; + +import org.hibernate.util.PropertiesHelper; +import org.hibernate.util.StringHelper; + +import com.opensymphony.oscache.base.CacheEntry; +import com.opensymphony.oscache.base.Config; + +/** + * Support for OpenSymphony OSCache. This implementation assumes + * that identifiers have well-behaved toString() methods. + * + * @author Mathias Bogaert + */ +public class OSCacheProvider implements CacheProvider { + + /** + * The OSCache refresh period property suffix. + */ + public static final String OSCACHE_REFRESH_PERIOD = "refresh.period"; + /** + * The OSCache CRON expression property suffix. + */ + public static final String OSCACHE_CRON = "cron"; + /** + * The OSCache cache capacity property suffix. + */ + public static final String OSCACHE_CAPACITY = "capacity"; + + private static final Properties OSCACHE_PROPERTIES = new Config().getProperties(); + + /** + * Builds a new {@link Cache} instance, and gets its properties from the OSCache {@link Config} + * which reads the properties file (oscache.properties) from the classpath. + * If the file cannot be found or loaded, the defaults are used. + * + * @param region the name of the cache region + * @param properties ignored; OSCache settings are read from oscache.properties + * @return the newly built cache + * @throws CacheException + */ + public Cache buildCache(String region, Properties properties) throws CacheException { + + int refreshPeriod = PropertiesHelper.getInt( + StringHelper.qualify(region, OSCACHE_REFRESH_PERIOD), + OSCACHE_PROPERTIES, + CacheEntry.INDEFINITE_EXPIRY + ); + String cron = OSCACHE_PROPERTIES.getProperty( StringHelper.qualify(region, OSCACHE_CRON) ); + + // construct the cache + final OSCache cache = new OSCache(refreshPeriod, cron, region); + + Integer capacity = PropertiesHelper.getInteger( StringHelper.qualify(region, OSCACHE_CAPACITY), OSCACHE_PROPERTIES ); + if ( capacity!=null ) cache.setCacheCapacity( capacity.intValue() ); + + return cache; + } + + public long nextTimestamp() { + return Timestamper.next(); + } + + /** + * Callback to perform any necessary initialization of the underlying cache implementation + * during SessionFactory construction. + * + * @param properties current configuration settings. + */ + public void start(Properties properties) throws CacheException { + } + + /** + * Callback to perform any necessary cleanup of the underlying cache implementation + * during SessionFactory.close(). + */ + public void stop() { + } + + public boolean isMinimalPutsEnabledByDefault() { + return false; + } + +} diff --git a/src/org/hibernate/cache/OptimisticCache.java b/src/org/hibernate/cache/OptimisticCache.java new file mode 100644 index 0000000000..7bcbcb24b8 --- /dev/null +++ b/src/org/hibernate/cache/OptimisticCache.java @@ -0,0 +1,64 @@ +package org.hibernate.cache; + +/** + * A contract for transactional cache implementations which support + * optimistic locking of items within the cache. + *
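+ * <p/> + * A transactional strategy backed by such a cache routes its write-backs + * through the dedicated callbacks rather than plain {@link Cache#put} + * (a sketch; the argument values are placeholders supplied by the caller): + * <pre> + * cache.writeLoad( key, value, version ); // after loading state from the database + * cache.writeInsert( key, value, version ); // after inserting into the database + * cache.writeUpdate( key, value, currentVersion, previousVersion ); // after updating the database + * </pre> + *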

+ * The optimistic locking capabilities are only utilized for + * the entity cache regions. + *

+ * Unlike the methods on the {@link Cache} interface, all the methods + * here will only ever be called from access scenarios where versioned + * data is actually a possibility (i.e., entity data). Be sure to consult + * with {@link OptimisticCacheSource#isVersioned()} to determine whether + * versioning is actually in effect. + * + * @author Steve Ebersole + */ +public interface OptimisticCache extends Cache { + /** + * Indicates the "source" of the cached data. Currently this will + * only ever represent an {@link org.hibernate.persister.entity.EntityPersister}. + *

    + * Made available to the cache so that it can access certain information + * about versioning strategy. + * + * @param source The source. + */ + public void setSource(OptimisticCacheSource source); + + /** + * Called during {@link CacheConcurrencyStrategy#insert} processing for + * transactional strategies. Indicates we have just performed an insert + * into the DB and now need to cache that entity's data. + * + * @param key The cache key. + * @param value The data to be cached. + * @param currentVersion The entity's version; or null if not versioned. + */ + public void writeInsert(Object key, Object value, Object currentVersion); + + /** + * Called during {@link CacheConcurrencyStrategy#update} processing for + * transactional strategies. Indicates we have just performed an update + * against the DB and now need to cache the updated state. + * + * @param key The cache key. + * @param value The data to be cached. + * @param currentVersion The entity's current version + * @param previousVersion The entity's previous version (before the update); + * or null if not versioned. + */ + public void writeUpdate(Object key, Object value, Object currentVersion, Object previousVersion); + + /** + * Called during {@link CacheConcurrencyStrategy#put} processing for + * transactional strategies. Indicates we have just loaded an entity's + * state from the database and need it cached. + * + * @param key The cache key. + * @param value The data to be cached. + * @param currentVersion The entity's version; or null if not versioned. + */ + public void writeLoad(Object key, Object value, Object currentVersion); +} diff --git a/src/org/hibernate/cache/OptimisticCacheSource.java b/src/org/hibernate/cache/OptimisticCacheSource.java new file mode 100644 index 0000000000..ca01e4f8c7 --- /dev/null +++ b/src/org/hibernate/cache/OptimisticCacheSource.java @@ -0,0 +1,29 @@ +package org.hibernate.cache; + +import java.util.Comparator; + +/** + * Contract for sources of optimistically lockable data sent to the second level + * cache. + *

+ * Note that currently {@link org.hibernate.persister.entity.EntityPersister}s are + * the only viable source. + * + * @author Steve Ebersole + */ +public interface OptimisticCacheSource { + /** + * Does this source represent versioned (and thus optimistically + * lockable) data? + * + * @return True if this source represents versioned data; false otherwise. + */ + public boolean isVersioned(); + + /** + * Get the comparator used to compare two different version values together. + * + * @return An appropriate comparator. + */ + public Comparator getVersionComparator(); +} diff --git a/src/org/hibernate/cache/OptimisticTreeCache.java b/src/org/hibernate/cache/OptimisticTreeCache.java new file mode 100644 index 0000000000..d31c0cfbc3 --- /dev/null +++ b/src/org/hibernate/cache/OptimisticTreeCache.java @@ -0,0 +1,329 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.Comparator; + + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jboss.cache.Fqn; +import org.jboss.cache.optimistic.DataVersion; +import org.jboss.cache.config.Option; +import org.jboss.cache.lock.TimeoutException; + +/** + * Represents a particular region within the given JBossCache TreeCache + * utilizing TreeCache's optimistic locking capabilities. + * + * @see OptimisticTreeCacheProvider for more details + * + * @author Steve Ebersole + */ +public class OptimisticTreeCache implements OptimisticCache { + + // todo : eventually merge this with TreeCache and just add optional opt-lock support there. + + private static final Log log = LogFactory.getLog( OptimisticTreeCache.class); + + private static final String ITEM = "item"; + + private org.jboss.cache.TreeCache cache; + private final String regionName; + private final Fqn regionFqn; + private OptimisticCacheSource source; + + public OptimisticTreeCache(org.jboss.cache.TreeCache cache, String regionName) + throws CacheException { + this.cache = cache; + this.regionName = regionName; + this.regionFqn = Fqn.fromString( regionName.replace( '.', '/' ) ); + } + + + // OptimisticCache impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void setSource(OptimisticCacheSource source) { + this.source = source; + } + + public void writeInsert(Object key, Object value, Object currentVersion) { + writeUpdate( key, value, currentVersion, null ); + } + + public void writeUpdate(Object key, Object value, Object currentVersion, Object previousVersion) { + try { + Option option = new Option(); + DataVersion dv = ( source != null && source.isVersioned() ) + ? new DataVersionAdapter( currentVersion, previousVersion, source.getVersionComparator(), source.toString() ) + : NonLockingDataVersion.INSTANCE; + option.setDataVersion( dv ); + cache.put( new Fqn( regionFqn, key ), ITEM, value, option ); + } + catch ( Exception e ) { + throw new CacheException( e ); + } + } + + public void writeLoad(Object key, Object value, Object currentVersion) { + try { + Option option = new Option(); + option.setFailSilently( true ); + option.setDataVersion( NonLockingDataVersion.INSTANCE ); + // use the ITEM constant here; the literal "ITEM" would not match the stored key + cache.remove( new Fqn( regionFqn, key ), ITEM, option ); + + option = new Option(); + option.setFailSilently( true ); + DataVersion dv = ( source != null && source.isVersioned() ) + ?
new DataVersionAdapter( currentVersion, currentVersion, source.getVersionComparator(), source.toString() ) + : NonLockingDataVersion.INSTANCE; + option.setDataVersion( dv ); + cache.put( new Fqn( regionFqn, key ), ITEM, value, option ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + + // Cache impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Object get(Object key) throws CacheException { + try { + Option option = new Option(); + option.setFailSilently( true ); +// option.setDataVersion( NonLockingDataVersion.INSTANCE ); + return cache.get( new Fqn( regionFqn, key ), ITEM, option ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public Object read(Object key) throws CacheException { + try { + return cache.get( new Fqn( regionFqn, key ), ITEM ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void update(Object key, Object value) throws CacheException { + try { + Option option = new Option(); + option.setDataVersion( NonLockingDataVersion.INSTANCE ); + cache.put( new Fqn( regionFqn, key ), ITEM, value, option ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void put(Object key, Object value) throws CacheException { + try { + log.trace( "performing put() into region [" + regionName + "]" ); + // do the put outside the scope of the JTA txn + Option option = new Option(); + option.setFailSilently( true ); + option.setDataVersion( NonLockingDataVersion.INSTANCE ); + cache.put( new Fqn( regionFqn, key ), ITEM, value, option ); + } + catch (TimeoutException te) { + //ignore! + log.debug("ignoring write lock acquisition failure"); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void remove(Object key) throws CacheException { + try { + // tree cache in optimistic mode seems to have a very difficult + // time with remove calls on non-existent nodes (NPEs)...
+ if ( cache.get( new Fqn( regionFqn, key ), ITEM ) != null ) { + Option option = new Option(); + option.setDataVersion( NonLockingDataVersion.INSTANCE ); + cache.remove( new Fqn( regionFqn, key ), option ); + } + else { + log.trace( "skipping remove() call as the underlying node did not seem to exist" ); + } + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void clear() throws CacheException { + try { + Option option = new Option(); + option.setDataVersion( NonLockingDataVersion.INSTANCE ); + cache.remove( regionFqn, option ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void destroy() throws CacheException { + try { + Option option = new Option(); + option.setCacheModeLocal( true ); + option.setFailSilently( true ); + option.setDataVersion( NonLockingDataVersion.INSTANCE ); + cache.remove( regionFqn, option ); + } + catch( Exception e ) { + throw new CacheException( e ); + } + } + + public void lock(Object key) throws CacheException { + throw new UnsupportedOperationException( "TreeCache is a fully transactional cache: " + regionName ); + } + + public void unlock(Object key) throws CacheException { + throw new UnsupportedOperationException( "TreeCache is a fully transactional cache: " + regionName ); + } + + public long nextTimestamp() { + return System.currentTimeMillis() / 100; + } + + public int getTimeout() { + return 600; //60 seconds + } + + public String getRegionName() { + return regionName; + } + + public long getSizeInMemory() { + return -1; + } + + public long getElementCountInMemory() { + try { + Set children = cache.getChildrenNames( regionFqn ); + return children == null ? 0 : children.size(); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public long getElementCountOnDisk() { + return 0; + } + + public Map toMap() { + try { + Map result = new HashMap(); + Set childrenNames = cache.getChildrenNames( regionFqn ); + if (childrenNames != null) { + Iterator iter = childrenNames.iterator(); + while ( iter.hasNext() ) { + Object key = iter.next(); + result.put( + key, + cache.get( new Fqn( regionFqn, key ), ITEM ) + ); + } + } + return result; + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public String toString() { + return "OptimisticTreeCache(" + regionName + ')'; + } + + public static class DataVersionAdapter implements DataVersion { + private final Object currentVersion; + private final Object previousVersion; + private final Comparator versionComparator; + private final String sourceIdentifer; + + public DataVersionAdapter(Object currentVersion, Object previousVersion, Comparator versionComparator, String sourceIdentifer) { + this.currentVersion = currentVersion; + this.previousVersion = previousVersion; + this.versionComparator = versionComparator; + this.sourceIdentifer = sourceIdentifer; + log.trace( "created " + this ); + } + + /** + * newerThan() call is dispatched against the DataVersion currently + * associated with the node; the passed dataVersion param is the + * DataVersion associated with the data we are trying to put into + * the node. + *
    + *
    + * we are expected to return true in the case where we (the current + * node DataVersion) are newer than the incoming value. Returning + * true here essentially means that an optimistic lock failure has + * occurred (because conversely, the value we are trying to put into + * the node is "older than" the value already there...) + */ + public boolean newerThan(DataVersion dataVersion) { + log.trace( "checking [" + this + "] against [" + dataVersion + "]" ); + if ( dataVersion instanceof CircumventChecksDataVersion ) { + log.trace( "skipping lock checks..." ); + return false; + } + else if ( dataVersion instanceof NonLockingDataVersion ) { + // can happen because of the multiple ways Cache.remove() + // can be invoked :( + log.trace( "skipping lock checks..." ); + return false; + } + DataVersionAdapter other = ( DataVersionAdapter ) dataVersion; + if ( other.previousVersion == null ) { + log.warn( "Unexpected optimistic lock check on inserting data" ); + // work around the "feature" where tree cache is validating the + // inserted node during the next transaction. no idea... + if ( this == dataVersion ) { + log.trace( "skipping lock checks due to same DV instance" ); + return false; + } + } + return versionComparator.compare( currentVersion, other.previousVersion ) >= 1; + } + + public String toString() { + return super.toString() + " [current=" + currentVersion + ", previous=" + previousVersion + ", src=" + sourceIdentifer + "]"; + } + } + + /** + * Used in regions where no locking should ever occur. This includes query-caches, + * update-timestamps caches, collection caches, and entity caches where the entity + * is not versioned. + */ + public static class NonLockingDataVersion implements DataVersion { + public static final DataVersion INSTANCE = new NonLockingDataVersion(); + public boolean newerThan(DataVersion dataVersion) { + log.trace( "non locking lock check..."); + return false; + } + } + + /** + * Used to signal to a DataVersionAdapter to simply not perform any checks. This + * is currently needed for proper handling of remove() calls for entity cache regions + * (we do not know the version info...). + */ + public static class CircumventChecksDataVersion implements DataVersion { + public static final DataVersion INSTANCE = new CircumventChecksDataVersion(); + public boolean newerThan(DataVersion dataVersion) { + throw new CacheException( "optimistic locking checks should never happen on CircumventChecksDataVersion" ); + } + } +} diff --git a/src/org/hibernate/cache/OptimisticTreeCacheProvider.java b/src/org/hibernate/cache/OptimisticTreeCacheProvider.java new file mode 100644 index 0000000000..fd2cc7458c --- /dev/null +++ b/src/org/hibernate/cache/OptimisticTreeCacheProvider.java @@ -0,0 +1,130 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Properties; +import javax.transaction.TransactionManager; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.cfg.Environment; +import org.hibernate.transaction.TransactionManagerLookup; +import org.hibernate.transaction.TransactionManagerLookupFactory; +import org.jboss.cache.PropertyConfigurator; + +/** + * Support for a standalone JBossCache TreeCache instance utilizing TreeCache's + * optimistic locking capabilities. This capability was added in JBossCache + * version 1.3.0; as such this provider will only work with that version or + * higher. +
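A minimal sketch of how this provider might be wired up, before the configuration details continue below. The property keys come from the Environment constants this file references; the resource name treecache-optimistic.xml and the JBoss lookup class are illustrative assumptions:

    // hypothetical bootstrap code, not part of this patch
    Properties props = new Properties();
    props.setProperty( Environment.CACHE_PROVIDER, OptimisticTreeCacheProvider.class.getName() );
    props.setProperty( Environment.CACHE_PROVIDER_CONFIG, "treecache-optimistic.xml" ); // assumed resource name
    props.setProperty( Environment.TRANSACTION_MANAGER_STRATEGY, "org.hibernate.transaction.JBossTransactionManagerLookup" );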
    + *
    + * The TreeCache instance is configured via a local config resource. The + resource to be used for configuration can be controlled by specifying a value + for the {@link #CONFIG_RESOURCE} config property. + + * @author Steve Ebersole + */ +public class OptimisticTreeCacheProvider implements CacheProvider { + + /** + * @deprecated use {@link Environment#CACHE_PROVIDER_CONFIG} + */ + public static final String CONFIG_RESOURCE = "hibernate.cache.opt_tree_cache.config"; + public static final String DEFAULT_CONFIG = "treecache.xml"; + + private static final String NODE_LOCKING_SCHEME = "OPTIMISTIC"; + private static final Log log = LogFactory.getLog( OptimisticTreeCacheProvider.class ); + + private org.jboss.cache.TreeCache cache; + + /** + * Construct and configure the Cache representation of a named cache region. + * + * @param regionName the name of the cache region + * @param properties configuration settings + * @return The Cache representation of the named cache region. + * @throws CacheException + * Indicates an error building the cache region. + */ + public Cache buildCache(String regionName, Properties properties) throws CacheException { + return new OptimisticTreeCache( cache, regionName ); + } + + public long nextTimestamp() { + return System.currentTimeMillis() / 100; + } + + /** + * Prepare the underlying JBossCache TreeCache instance. + * + * @param properties All current config settings. + * @throws CacheException + * Indicates a problem preparing cache for use. + */ + public void start(Properties properties) { + String resource = properties.getProperty( Environment.CACHE_PROVIDER_CONFIG ); + if (resource == null) { + resource = properties.getProperty( CONFIG_RESOURCE ); + } + if ( resource == null ) { + resource = DEFAULT_CONFIG; + } + log.debug( "Configuring TreeCache from resource [" + resource + "]" ); + try { + cache = new org.jboss.cache.TreeCache(); + PropertyConfigurator config = new PropertyConfigurator(); + config.configure( cache, resource ); + TransactionManagerLookup transactionManagerLookup = + TransactionManagerLookupFactory.getTransactionManagerLookup( properties ); + if ( transactionManagerLookup == null ) { + throw new CacheException( + "JBossCache only supports optimistic locking with a configured " + + "TransactionManagerLookup (" + Environment.TRANSACTION_MANAGER_STRATEGY + ")" + ); + } + cache.setTransactionManagerLookup( + new TransactionManagerLookupAdaptor( + transactionManagerLookup, + properties + ) + ); + if ( !
NODE_LOCKING_SCHEME.equalsIgnoreCase( cache.getNodeLockingScheme() ) ) { + log.info( "Overriding node-locking-scheme to : " + NODE_LOCKING_SCHEME ); + cache.setNodeLockingScheme( NODE_LOCKING_SCHEME ); + } + cache.start(); + } + catch ( Exception e ) { + throw new CacheException( e ); + } + } + + public void stop() { + if ( cache != null ) { + cache.stop(); + cache.destroy(); + cache = null; + } + } + + public boolean isMinimalPutsEnabledByDefault() { + return true; + } + + static final class TransactionManagerLookupAdaptor implements org.jboss.cache.TransactionManagerLookup { + private final TransactionManagerLookup tml; + private final Properties props; + + TransactionManagerLookupAdaptor(TransactionManagerLookup tml, Properties props) { + this.tml = tml; + this.props = props; + } + + public TransactionManager getTransactionManager() throws Exception { + return tml.getTransactionManager( props ); + } + } + + public org.jboss.cache.TreeCache getUnderlyingCache() { + return cache; + } +} diff --git a/src/org/hibernate/cache/QueryCache.java b/src/org/hibernate/cache/QueryCache.java new file mode 100644 index 0000000000..0cc457cf31 --- /dev/null +++ b/src/org/hibernate/cache/QueryCache.java @@ -0,0 +1,33 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.List; +import java.util.Set; + +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.type.Type; + +/** + * Defines the contract for caches capable of storing query results. These + * caches should only concern themselves with storing the matching result ids. + * The transactional semantics are necessarily less strict than the semantics + * of an item cache. + * + * @author Gavin King + */ +public interface QueryCache { + + public void clear() throws CacheException; + + public boolean put(QueryKey key, Type[] returnTypes, List result, boolean isNaturalKeyLookup, SessionImplementor session) throws HibernateException; + + public List get(QueryKey key, Type[] returnTypes, boolean isNaturalKeyLookup, Set spaces, SessionImplementor session) + throws HibernateException; + + public void destroy(); + + public Cache getCache(); + + public String getRegionName(); +} diff --git a/src/org/hibernate/cache/QueryCacheFactory.java b/src/org/hibernate/cache/QueryCacheFactory.java new file mode 100644 index 0000000000..9472364fa6 --- /dev/null +++ b/src/org/hibernate/cache/QueryCacheFactory.java @@ -0,0 +1,24 @@ +// $Id$ +package org.hibernate.cache; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Settings; + +import java.util.Properties; + +/** + * Defines a factory for query cache instances. These factories are responsible for + * creating individual QueryCache instances. 
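To ground this contract, a hedged sketch of a factory that simply hands back the StandardQueryCache appearing later in this patch; Hibernate's default factory does essentially this, though the class name used here is illustrative (imports as in QueryCacheFactory.java above):

    public class SimpleQueryCacheFactory implements QueryCacheFactory {
    	public QueryCache getQueryCache(
    			String regionName,
    			UpdateTimestampsCache updateTimestampsCache,
    			Settings settings,
    			Properties props) throws HibernateException {
    		// delegate straight to the standard implementation
    		return new StandardQueryCache( settings, props, updateTimestampsCache, regionName );
    	}
    }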
+ * + * @author Steve Ebersole + */ +public interface QueryCacheFactory { + + public QueryCache getQueryCache( + String regionName, + UpdateTimestampsCache updateTimestampsCache, + Settings settings, + Properties props) + throws HibernateException; + +} diff --git a/src/org/hibernate/cache/QueryKey.java b/src/org/hibernate/cache/QueryKey.java new file mode 100644 index 0000000000..5ccdcf48e2 --- /dev/null +++ b/src/org/hibernate/cache/QueryKey.java @@ -0,0 +1,117 @@ +//$Id$ +package org.hibernate.cache; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; + +import org.hibernate.EntityMode; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.RowSelection; +import org.hibernate.transform.ResultTransformer; +import org.hibernate.type.Type; +import org.hibernate.util.EqualsHelper; + +/** + * A key that identifies a particular query with bound parameter values + * @author Gavin King + */ +public class QueryKey implements Serializable { + private final String sqlQueryString; + private final Type[] types; + private final Object[] values; + private final Integer firstRow; + private final Integer maxRows; + private final Map namedParameters; + private final EntityMode entityMode; + private final Set filters; + private final int hashCode; + + // the user provided resulttransformer, not the one used with "select new". Here to avoid mangling transformed/non-transformed results. + private final ResultTransformer customTransformer; + + public QueryKey(String queryString, QueryParameters queryParameters, Set filters, EntityMode entityMode) { + this.sqlQueryString = queryString; + this.types = queryParameters.getPositionalParameterTypes(); + this.values = queryParameters.getPositionalParameterValues(); + RowSelection selection = queryParameters.getRowSelection(); + if (selection!=null) { + firstRow = selection.getFirstRow(); + maxRows = selection.getMaxRows(); + } + else { + firstRow = null; + maxRows = null; + } + this.namedParameters = queryParameters.getNamedParameters(); + this.entityMode = entityMode; + this.filters = filters; + this.customTransformer = queryParameters.getResultTransformer(); + this.hashCode = getHashCode(); + } + + public boolean equals(Object other) { + QueryKey that = (QueryKey) other; + if ( !sqlQueryString.equals(that.sqlQueryString) ) return false; + if ( !EqualsHelper.equals(firstRow, that.firstRow) || !EqualsHelper.equals(maxRows, that.maxRows) ) return false; + if ( !EqualsHelper.equals(customTransformer, that.customTransformer) ) return false; + if (types==null) { + if (that.types!=null) return false; + } + else { + if (that.types==null) return false; + if ( types.length!=that.types.length ) return false; + for ( int i=0; ialmost maintains the semantics. + * Repeatable read isolation is compromised in the case of concurrent writes. + * This is an "asynchronous" concurrency strategy.
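To make the soft-lock protocol concrete, a hedged sketch of the call sequence the session machinery drives through this strategy during an update; the method names come from the class below, while key, the version objects, and the underlying cache region are assumed to be in hand:

    // illustrative only: how a session would drive ReadWriteCache around an update
    CacheConcurrencyStrategy strategy = new ReadWriteCache();
    strategy.setCache( underlyingCache ); // some org.hibernate.cache.Cache region

    SoftLock lock = strategy.lock( key, oldVersion ); // soft-lock before writing to the database
    try {
    	// ... execute the SQL UPDATE and commit ...
    	strategy.afterUpdate( key, newState, newVersion, lock ); // re-cache (if safe) and release
    }
    catch (Exception e) {
    	strategy.release( key, lock ); // on failure, just release; the item stays invalidated
    }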
    + *
    + * If this strategy is used in a cluster, the underlying cache implementation + * must support distributed hard locks (which are held only momentarily). This + * strategy also assumes that the underlying cache implementation does not do + * asynchronous replication and that state has been fully replicated as soon + * as the lock is released. + * + * @see NonstrictReadWriteCache for a faster algorithm + * @see CacheConcurrencyStrategy + */ +public class ReadWriteCache implements CacheConcurrencyStrategy { + + private static final Log log = LogFactory.getLog(ReadWriteCache.class); + + private Cache cache; + private int nextLockId; + + public ReadWriteCache() {} + + public void setCache(Cache cache) { + this.cache=cache; + } + + public Cache getCache() { + return cache; + } + + public String getRegionName() { + return cache.getRegionName(); + } + + /** + * Generate an id for a new lock. Uniqueness per cache instance is very + * desirable but not absolutely critical. Must be called from one of the + * synchronized methods of this class. + */ + private int nextLockId() { + if (nextLockId==Integer.MAX_VALUE) nextLockId = Integer.MIN_VALUE; + return nextLockId++; + } + + /** + * Do not return an item whose timestamp is later than the current + * transaction timestamp. (Otherwise we might compromise repeatable + * read unnecessarily.) Do not return an item which is soft-locked. + * Always go straight to the database instead.
    + *
    + * Note that since reading an item from that cache does not actually + * go to the database, it is possible to see a kind of phantom read + * due to the underlying row being updated after we have read it + * from the cache. This would not be possible in a lock-based + * implementation of repeatable read isolation. It is also possible + * to overwrite changes made and committed by another transaction + * after the current transaction read the item from the cache. This + * problem would be caught by the update-time version-checking, if + * the data is versioned or timestamped. + */ + public synchronized Object get(Object key, long txTimestamp) throws CacheException { + + if ( log.isTraceEnabled() ) log.trace("Cache lookup: " + key); + + /*try { + cache.lock(key);*/ + + Lockable lockable = (Lockable) cache.get(key); + + boolean gettable = lockable!=null && lockable.isGettable(txTimestamp); + + if (gettable) { + if ( log.isTraceEnabled() ) log.trace("Cache hit: " + key); + return ( (Item) lockable ).getValue(); + } + else { + if ( log.isTraceEnabled() ) { + if (lockable==null) { + log.trace("Cache miss: " + key); + } + else { + log.trace("Cached item was locked: " + key); + } + } + return null; + } + /*} + finally { + cache.unlock(key); + }*/ + } + + /** + * Stop any other transactions reading or writing this item to/from + * the cache. Send them straight to the database instead. (The lock + * does time out eventually.) This implementation tracks concurrent + * locks of transactions which simultaneously attempt to write to an + * item. + */ + public synchronized SoftLock lock(Object key, Object version) throws CacheException { + if ( log.isTraceEnabled() ) log.trace("Invalidating: " + key); + + try { + cache.lock(key); + + Lockable lockable = (Lockable) cache.get(key); + long timeout = cache.nextTimestamp() + cache.getTimeout(); + final Lock lock = (lockable==null) ? + new Lock( timeout, nextLockId(), version ) : + lockable.lock( timeout, nextLockId() ); + cache.update(key, lock); + return lock; + } + finally { + cache.unlock(key); + } + + } + + /** + * Do not add an item to the cache unless the current transaction + * timestamp is later than the timestamp at which the item was + * invalidated. (Otherwise, a stale item might be re-added if the + * database is operating in repeatable read isolation mode.) + * For versioned data, don't add the item unless it is the later + * version. + */ + public synchronized boolean put( + Object key, + Object value, + long txTimestamp, + Object version, + Comparator versionComparator, + boolean minimalPut) + throws CacheException { + if ( log.isTraceEnabled() ) log.trace("Caching: " + key); + + try { + cache.lock(key); + + Lockable lockable = (Lockable) cache.get(key); + + boolean puttable = lockable==null || + lockable.isPuttable(txTimestamp, version, versionComparator); + + if (puttable) { + cache.put( key, new Item( value, version, cache.nextTimestamp() ) ); + if ( log.isTraceEnabled() ) log.trace("Cached: " + key); + return true; + } + else { + if ( log.isTraceEnabled() ) { + if ( lockable.isLock() ) { + log.trace("Item was locked: " + key); + } + else { + log.trace("Item was already cached: " + key); + } + } + return false; + } + } + finally { + cache.unlock(key); + } + } + + /** + * decrement a lock and put it back in the cache + */ + private void decrementLock(Object key, Lock lock) throws CacheException { + //decrement the lock + lock.unlock( cache.nextTimestamp() ); + cache.update(key, lock); + } + + /** + * Release the soft lock on the item. 
Other transactions may now + re-cache the item (assuming that no other transaction holds a + simultaneous lock). + */ + public synchronized void release(Object key, SoftLock clientLock) throws CacheException { + if ( log.isTraceEnabled() ) log.trace("Releasing: " + key); + + try { + cache.lock(key); + + Lockable lockable = (Lockable) cache.get(key); + if ( isUnlockable(clientLock, lockable) ) { + decrementLock(key, (Lock) lockable); + } + else { + handleLockExpiry(key); + } + } + finally { + cache.unlock(key); + } + } + + void handleLockExpiry(Object key) throws CacheException { + log.warn("An item was expired by the cache while it was locked (increase your cache timeout): " + key); + long ts = cache.nextTimestamp() + cache.getTimeout(); + // create new lock that times out immediately + Lock lock = new Lock( ts, nextLockId(), null ); + lock.unlock(ts); + cache.update(key, lock); + } + + public void clear() throws CacheException { + cache.clear(); + } + + public void remove(Object key) throws CacheException { + cache.remove(key); + } + + public void destroy() { + try { + cache.destroy(); + } + catch (Exception e) { + log.warn("could not destroy cache", e); + } + } + + /** + * Re-cache the updated state, if and only if there are + * no other concurrent soft locks. Release our lock. + */ + public synchronized boolean afterUpdate(Object key, Object value, Object version, SoftLock clientLock) + throws CacheException { + + if ( log.isTraceEnabled() ) log.trace("Updating: " + key); + + try { + cache.lock(key); + + Lockable lockable = (Lockable) cache.get(key); + if ( isUnlockable(clientLock, lockable) ) { + Lock lock = (Lock) lockable; + if ( lock.wasLockedConcurrently() ) { + // just decrement the lock, don't recache + // (we don't know which transaction won) + decrementLock(key, lock); + return false; + } + else { + //recache the updated state + cache.update( key, new Item( value, version, cache.nextTimestamp() ) ); + if ( log.isTraceEnabled() ) log.trace("Updated: " + key); + return true; + } + } + else { + handleLockExpiry(key); + return false; + } + + } + finally { + cache.unlock(key); + } + } + + /** + * Add the new item to the cache, checking that no other transaction has + * accessed the item. + */ + public synchronized boolean afterInsert(Object key, Object value, Object version) + throws CacheException { + + if ( log.isTraceEnabled() ) log.trace("Inserting: " + key); + try { + cache.lock(key); + + Lockable lockable = (Lockable) cache.get(key); + if (lockable==null) { + cache.update( key, new Item( value, version, cache.nextTimestamp() ) ); + if ( log.isTraceEnabled() ) log.trace("Inserted: " + key); + return true; + } + else { + return false; + } + } + finally { + cache.unlock(key); + } + } + + /** + * Do nothing. + */ + public void evict(Object key) throws CacheException { + // noop + } + + /** + * Do nothing. + */ + public boolean insert(Object key, Object value, Object currentVersion) { + return false; + } + + /** + * Do nothing. + */ + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) { + return false; + } + + /** + * Is the client's lock commensurate with the item in the cache? + * If it is not, we know that the cache expired the original + * lock.
+ */ + private boolean isUnlockable(SoftLock clientLock, Lockable myLock) + throws CacheException { + //null clientLock is remotely possible but will never happen in practice + return myLock!=null && + myLock.isLock() && + clientLock!=null && + ( (Lock) clientLock ).getId()==( (Lock) myLock ).getId(); + } + + public static interface Lockable { + public Lock lock(long timeout, int id); + public boolean isLock(); + public boolean isGettable(long txTimestamp); + public boolean isPuttable(long txTimestamp, Object newVersion, Comparator comparator); + } + + /** + * An item of cached data, timestamped with the time it was cached. + * @see ReadWriteCache + */ + public static final class Item implements Serializable, Lockable { + + private final long freshTimestamp; + private final Object value; + private final Object version; + + public Item(Object value, Object version, long currentTimestamp) { + this.value = value; + this.version = version; + freshTimestamp = currentTimestamp; + } + /** + * The timestamp on the cached data + */ + public long getFreshTimestamp() { + return freshTimestamp; + } + /** + * The actual cached data + */ + public Object getValue() { + return value; + } + + /** + * Lock the item + */ + public Lock lock(long timeout, int id) { + return new Lock(timeout, id, version); + } + /** + * Not a lock! + */ + public boolean isLock() { + return false; + } + /** + * Is this item visible to the timestamped + * transaction? + */ + public boolean isGettable(long txTimestamp) { + return freshTimestamp < txTimestamp; + } + + /** + * Don't overwrite already cached items + */ + public boolean isPuttable(long txTimestamp, Object newVersion, Comparator comparator) { + // we really could refresh the item if it + // is not a lock, but it might be slower + //return freshTimestamp < txTimestamp + return version!=null && comparator.compare(version, newVersion) < 0; + } + + public String toString() { + return "Item{version=" + version + + ",freshTimestamp=" + freshTimestamp; + } + } + + /** + * A soft lock which supports concurrent locking, + * timestamped with the time it was released + * @author Gavin King + */ + public static final class Lock implements Serializable, Lockable, SoftLock { + private long unlockTimestamp = -1; + private int multiplicity = 1; + private boolean concurrentLock = false; + private long timeout; + private final int id; + private final Object version; + + public Lock(long timeout, int id, Object version) { + this.timeout = timeout; + this.id = id; + this.version = version; + } + + public long getUnlockTimestamp() { + return unlockTimestamp; + } + /** + * Increment the lock, setting the + * new lock timeout + */ + public Lock lock(long timeout, int id) { + concurrentLock = true; + multiplicity++; + this.timeout = timeout; + return this; + } + /** + * Decrement the lock, setting the unlock + * timestamp if now unlocked + * @param currentTimestamp + */ + public void unlock(long currentTimestamp) { + if ( --multiplicity == 0 ) { + unlockTimestamp = currentTimestamp; + } + } + + /** + * Can the timestamped transaction re-cache this + * locked item now? + */ + public boolean isPuttable(long txTimestamp, Object newVersion, Comparator comparator) { + if (timeout < txTimestamp) return true; + if (multiplicity>0) return false; + return version==null ? + unlockTimestamp < txTimestamp : + comparator.compare(version, newVersion) < 0; //by requiring <, we rely on lock timeout in the case of an unsuccessful update!
+ } + + /** + * Was this lock held concurrently by multiple + * transactions? + */ + public boolean wasLockedConcurrently() { + return concurrentLock; + } + /** + * Yes, this is a lock + */ + public boolean isLock() { + return true; + } + /** + * locks are not returned to the client! + */ + public boolean isGettable(long txTimestamp) { + return false; + } + + public int getId() { return id; } + + public String toString() { + return "Lock{id=" + id + + ",version=" + version + + ",multiplicity=" + multiplicity + + ",unlockTimestamp=" + unlockTimestamp; + } + + } + + public String toString() { + return cache + "(read-write)"; + } + +} + + + + + + diff --git a/src/org/hibernate/cache/StandardQueryCache.java b/src/org/hibernate/cache/StandardQueryCache.java new file mode 100644 index 0000000000..cdb7693076 --- /dev/null +++ b/src/org/hibernate/cache/StandardQueryCache.java @@ -0,0 +1,182 @@ +//$Id$ +package org.hibernate.cache; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.HibernateException; +import org.hibernate.UnresolvableObjectException; +import org.hibernate.cfg.Settings; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; + +/** + * The standard implementation of the Hibernate QueryCache interface. This + * implementation is very good at recognizing stale query results and + * re-running queries when it detects this condition, recaching the new + * results. + * + * @author Gavin King + */ +public class StandardQueryCache implements QueryCache { + + private static final Log log = LogFactory.getLog( StandardQueryCache.class ); + + private Cache queryCache; + private UpdateTimestampsCache updateTimestampsCache; + private final String regionName; + + public void clear() throws CacheException { + queryCache.clear(); + } + + public StandardQueryCache( + final Settings settings, + final Properties props, + final UpdateTimestampsCache updateTimestampsCache, + String regionName) throws HibernateException { + if ( regionName == null ) { + regionName = StandardQueryCache.class.getName(); + } + String prefix = settings.getCacheRegionPrefix(); + if ( prefix != null ) { + regionName = prefix + '.'
+ regionName; + } + log.info( "starting query cache at region: " + regionName ); + + this.queryCache = settings.getCacheProvider().buildCache(regionName, props); + this.updateTimestampsCache = updateTimestampsCache; + this.regionName = regionName; + } + + public boolean put( + QueryKey key, + Type[] returnTypes, + List result, + boolean isNaturalKeyLookup, + SessionImplementor session) throws HibernateException { + + if ( isNaturalKeyLookup && result.size()==0 ) { + return false; + } + else { + Long ts = new Long( session.getTimestamp() ); + + if ( log.isDebugEnabled() ) { + log.debug( "caching query results in region: " + regionName + "; timestamp=" + ts ); + } + + List cacheable = new ArrayList( result.size()+1 ); + cacheable.add( ts ); + for ( int i=0; inull + * @throws CacheException + */ + public Object get(Object key) throws CacheException { + if (key instanceof Serializable) { + return cache.get( (Serializable) key ); + } + else { + throw new CacheException("Keys must implement Serializable"); + } + } + + public Object read(Object key) throws CacheException { + return get(key); + } + + /** + * Add an item to the cache + * @param key + * @param value + * @throws CacheException + */ + public void update(Object key, Object value) throws CacheException { + put(key, value); + } + + /** + * Add an item to the cache + * @param key + * @param value + * @throws CacheException + */ + public void put(Object key, Object value) throws CacheException { + if (key instanceof Serializable) { + cache.put( (Serializable) key, value ); + } + else { + throw new CacheException("Keys must implement Serializable"); + } + } + + /** + * Remove an item from the cache + */ + public void remove(Object key) throws CacheException { + if (key instanceof Serializable) { + cache.clear( (Serializable) key ); + } + else { + throw new CacheException("Keys must implement Serializable"); + } + } + + /** + * Clear the cache + */ + public void clear() throws CacheException { + cache.clearAll(); + } + + /** + * Clean up + */ + public void destroy() throws CacheException { + cache.clearAll(); + } + + /** + * If this is a clustered cache, lock the item + */ + public void lock(Object key) throws CacheException { + throw new UnsupportedOperationException("SwarmCache does not support locking (use nonstrict-read-write)"); + } + + /** + * If this is a clustered cache, unlock the item + */ + public void unlock(Object key) throws CacheException { + throw new UnsupportedOperationException("SwarmCache does not support locking (use nonstrict-read-write)"); + } + + /** + * Generate a (coarse) timestamp + */ + public long nextTimestamp() { + return System.currentTimeMillis() / 100; + } + + /** + * Get a reasonable "lock timeout" + */ + public int getTimeout() { + return 600; + } + + public String getRegionName() { + return regionName; + } + + public long getSizeInMemory() { + return -1; + } + + public long getElementCountInMemory() { + return -1; + } + + public long getElementCountOnDisk() { + return -1; + } + + public Map toMap() { + throw new UnsupportedOperationException(); + } + + public String toString() { + return "SwarmCache(" + regionName + ')'; + } + +} diff --git a/src/org/hibernate/cache/SwarmCacheProvider.java b/src/org/hibernate/cache/SwarmCacheProvider.java new file mode 100644 index 0000000000..38c53db5d5 --- /dev/null +++ b/src/org/hibernate/cache/SwarmCacheProvider.java @@ -0,0 +1,58 @@ +//$Id$ +package org.hibernate.cache; + +import net.sf.swarmcache.CacheConfiguration; +import 
net.sf.swarmcache.CacheConfigurationManager; +import net.sf.swarmcache.CacheFactory; +import net.sf.swarmcache.ObjectCache; + +import java.util.Properties; + +/** + * Support for SwarmCache replicated cache. SwarmCache does not support + * locking, so strict "read-write" semantics are unsupported. + * @author Jason Carreira + */ +public class SwarmCacheProvider implements CacheProvider { + + private CacheFactory factory; + + public Cache buildCache(String regionName, Properties properties) throws CacheException { + ObjectCache cache = factory.createCache(regionName); + if (cache==null) { + throw new CacheException("SwarmCache did not create a cache: " + regionName); + } + return new SwarmCache(cache, regionName); + } + + public long nextTimestamp() { + return System.currentTimeMillis() / 100; + } + + /** + * Callback to perform any necessary initialization of the underlying cache implementation + * during SessionFactory construction. + * + * @param properties current configuration settings. + */ + public void start(Properties properties) throws CacheException { + CacheConfiguration config = CacheConfigurationManager.getConfig(properties); + factory = new CacheFactory(config); + } + + /** + * Callback to perform any necessary cleanup of the underlying cache implementation + * during SessionFactory.close(). + */ + public void stop() { + if (factory != null) { + factory.shutdown(); + factory = null; + } + } + + public boolean isMinimalPutsEnabledByDefault() { + return true; + } + +} diff --git a/src/org/hibernate/cache/Timestamper.java b/src/org/hibernate/cache/Timestamper.java new file mode 100644 index 0000000000..6aca831d02 --- /dev/null +++ b/src/org/hibernate/cache/Timestamper.java @@ -0,0 +1,37 @@ +//$Id$ +package org.hibernate.cache; + +/** + * Generates increasing identifiers (in a single VM only). + * Not valid across multiple VMs. Identifiers are not necessarily + * strictly increasing, but usually are. + */ +public final class Timestamper { + private static short counter = 0; + private static long time; + private static final int BIN_DIGITS = 12; + public static final short ONE_MS = 1<ReadWriteCache. This is + * a "synchronous" concurrency strategy. + * + * @author Gavin King + */ +public class TransactionalCache implements CacheConcurrencyStrategy { + + private static final Log log = LogFactory.getLog( TransactionalCache.class ); + + private Cache cache; + + public String getRegionName() { + return cache.getRegionName(); + } + + public Object get(Object key, long txTimestamp) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "cache lookup: " + key ); + } + Object result = cache.read( key ); + if ( log.isDebugEnabled() ) { + log.debug( result == null ? "cache miss" : "cache hit" ); + } + return result; + } + + public boolean put( + Object key, + Object value, + long txTimestamp, + Object version, + Comparator versionComparator, + boolean minimalPut) throws CacheException { + if ( minimalPut && cache.read( key ) != null ) { + if ( log.isDebugEnabled() ) { + log.debug( "item already cached: " + key ); + } + return false; + } + if ( log.isDebugEnabled() ) { + log.debug( "caching: " + key ); + } + if ( cache instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache ).writeLoad( key, value, version ); + } + else { + cache.put( key, value ); + } + return true; + } + + /** + * Do nothing, returning null. + */ + public SoftLock lock(Object key, Object version) throws CacheException { + //noop + return null; + } + + /** + * Do nothing. 
+ */ + public void release(Object key, SoftLock clientLock) throws CacheException { + //noop + } + + public boolean update( + Object key, + Object value, + Object currentVersion, + Object previousVersion) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "updating: " + key ); + } + if ( cache instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache ).writeUpdate( key, value, currentVersion, previousVersion ); + } + else { + cache.update( key, value ); + } + return true; + } + + public boolean insert( + Object key, + Object value, + Object currentVersion) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "inserting: " + key ); + } + if ( cache instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache ).writeInsert( key, value, currentVersion ); + } + else { + cache.update( key, value ); + } + return true; + } + + public void evict(Object key) throws CacheException { + cache.remove( key ); + } + + public void remove(Object key) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug( "removing: " + key ); + } + cache.remove( key ); + } + + public void clear() throws CacheException { + log.debug( "clearing" ); + cache.clear(); + } + + public void destroy() { + try { + cache.destroy(); + } + catch ( Exception e ) { + log.warn( "could not destroy cache", e ); + } + } + + public void setCache(Cache cache) { + this.cache = cache; + } + + public Cache getCache() { + return cache; + } + + /** + * Do nothing. + */ + public boolean afterInsert( + Object key, + Object value, + Object version) throws CacheException { + return false; + } + + /** + * Do nothing. + */ + public boolean afterUpdate( + Object key, + Object value, + Object version, + SoftLock clientLock) throws CacheException { + return false; + } + + public String toString() { + return cache + "(transactional)"; + } + +} diff --git a/src/org/hibernate/cache/TreeCache.java b/src/org/hibernate/cache/TreeCache.java new file mode 100644 index 0000000000..a89599ac9c --- /dev/null +++ b/src/org/hibernate/cache/TreeCache.java @@ -0,0 +1,205 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +import javax.transaction.SystemException; +import javax.transaction.Transaction; +import javax.transaction.TransactionManager; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jboss.cache.Fqn; +import org.jboss.cache.lock.TimeoutException; + +/** + * Represents a particular region within the given JBossCache TreeCache. 
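One detail worth noting before the class body: region names are mapped onto tree nodes by replacing dots with slashes, exactly as the constructor below does. A quick illustration with an assumed region name:

    // "org.example.model.Customer" becomes the node /org/example/model/Customer
    Fqn regionFqn = Fqn.fromString( "org.example.model.Customer".replace( '.', '/' ) );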
+ * + * @author Gavin King + */ +public class TreeCache implements Cache { + + private static final Log log = LogFactory.getLog(TreeCache.class); + + private static final String ITEM = "item"; + + private org.jboss.cache.TreeCache cache; + private final String regionName; + private final Fqn regionFqn; + private final TransactionManager transactionManager; + + public TreeCache(org.jboss.cache.TreeCache cache, String regionName, TransactionManager transactionManager) + throws CacheException { + this.cache = cache; + this.regionName = regionName; + this.regionFqn = Fqn.fromString( regionName.replace( '.', '/' ) ); + this.transactionManager = transactionManager; + } + + public Object get(Object key) throws CacheException { + Transaction tx = suspend(); + try { + return read(key); + } + finally { + resume( tx ); + } + } + + public Object read(Object key) throws CacheException { + try { + return cache.get( new Fqn( regionFqn, key ), ITEM ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void update(Object key, Object value) throws CacheException { + try { + cache.put( new Fqn( regionFqn, key ), ITEM, value ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void put(Object key, Object value) throws CacheException { + Transaction tx = suspend(); + try { + //do the failfast put outside the scope of the JTA txn + cache.putFailFast( new Fqn( regionFqn, key ), ITEM, value, 0 ); + } + catch (TimeoutException te) { + //ignore! + log.debug("ignoring write lock acquisition failure"); + } + catch (Exception e) { + throw new CacheException(e); + } + finally { + resume( tx ); + } + } + + private void resume(Transaction tx) { + try { + if (tx!=null) transactionManager.resume(tx); + } + catch (Exception e) { + throw new CacheException("Could not resume transaction", e); + } + } + + private Transaction suspend() { + Transaction tx = null; + try { + if ( transactionManager!=null ) { + tx = transactionManager.suspend(); + } + } + catch (SystemException se) { + throw new CacheException("Could not suspend transaction", se); + } + return tx; + } + + public void remove(Object key) throws CacheException { + try { + cache.remove( new Fqn( regionFqn, key ) ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void clear() throws CacheException { + try { + cache.remove( regionFqn ); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void destroy() throws CacheException { + try { + // NOTE : evict() operates locally only (i.e., does not propagate + // to any other nodes in the potential cluster). This is + exactly what is needed when we destroy() here; destroy() is used + as part of the process of shutting down a SessionFactory; thus + these removals should not be propagated + cache.evict( regionFqn ); + } + catch( Exception e ) { + throw new CacheException( e ); + } + } + + public void lock(Object key) throws CacheException { + throw new UnsupportedOperationException( "TreeCache is a fully transactional cache" + regionName ); + } + + public void unlock(Object key) throws CacheException { + throw new UnsupportedOperationException( "TreeCache is a fully transactional cache: " + regionName ); + } + + public long nextTimestamp() { + return System.currentTimeMillis() / 100; + } + + public int getTimeout() { + return 600; //60 seconds + } + + public String getRegionName() { + return regionName; + } + + public long getSizeInMemory() { + return -1; + } + + public long getElementCountInMemory() { + try { + Set children = cache.getChildrenNames( regionFqn ); + return children == null ? 0 : children.size(); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public long getElementCountOnDisk() { + return 0; + } + + public Map toMap() { + try { + Map result = new HashMap(); + Set childrenNames = cache.getChildrenNames( regionFqn ); + if (childrenNames != null) { + Iterator iter = childrenNames.iterator(); + while ( iter.hasNext() ) { + Object key = iter.next(); + result.put( + key, + cache.get( new Fqn( regionFqn, key ), ITEM ) + ); + } + } + return result; + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public String toString() { + return "TreeCache(" + regionName + ')'; + } + +} diff --git a/src/org/hibernate/cache/TreeCacheProvider.java b/src/org/hibernate/cache/TreeCacheProvider.java new file mode 100644 index 0000000000..b73737e5a7 --- /dev/null +++ b/src/org/hibernate/cache/TreeCacheProvider.java @@ -0,0 +1,109 @@ +//$Id$ +package org.hibernate.cache; + +import java.util.Properties; +import javax.transaction.TransactionManager; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.transaction.TransactionManagerLookup; +import org.hibernate.transaction.TransactionManagerLookupFactory; +import org.hibernate.cfg.Environment; +import org.jboss.cache.PropertyConfigurator; + +/** + * Support for a standalone JBossCache (TreeCache) instance. The JBossCache is configured + * via a local config resource. + * + * @author Gavin King + */ +public class TreeCacheProvider implements CacheProvider { + + /** + * @deprecated use {@link org.hibernate.cfg.Environment#CACHE_PROVIDER_CONFIG} + */ + public static final String CONFIG_RESOURCE = "hibernate.cache.tree_cache.config"; + public static final String DEFAULT_CONFIG = "treecache.xml"; + + private static final Log log = LogFactory.getLog( TreeCacheProvider.class ); + + private org.jboss.cache.TreeCache cache; + private TransactionManager transactionManager; + + /** + * Construct and configure the Cache representation of a named cache region. + * + * @param regionName the name of the cache region + * @param properties configuration settings + * @return The Cache representation of the named cache region. + * @throws CacheException Indicates an error building the cache region.
+ */ + public Cache buildCache(String regionName, Properties properties) throws CacheException { + return new TreeCache(cache, regionName, transactionManager); + } + + public long nextTimestamp() { + return System.currentTimeMillis() / 100; + } + + /** + * Prepare the underlying JBossCache TreeCache instance. + * + * @param properties All current config settings. + * + * @throws CacheException Indicates a problem preparing cache for use. + */ + public void start(Properties properties) { + String resource = properties.getProperty( Environment.CACHE_PROVIDER_CONFIG ); + + if ( resource == null ) { + resource = properties.getProperty( CONFIG_RESOURCE ); + } + if ( resource == null ) { + resource = DEFAULT_CONFIG; + } + log.debug( "Configuring TreeCache from resource [" + resource + "]" ); + try { + cache = new org.jboss.cache.TreeCache(); + PropertyConfigurator config = new PropertyConfigurator(); + config.configure( cache, resource ); + TransactionManagerLookup transactionManagerLookup = TransactionManagerLookupFactory.getTransactionManagerLookup(properties); + if (transactionManagerLookup!=null) { + cache.setTransactionManagerLookup( new TransactionManagerLookupAdaptor(transactionManagerLookup, properties) ); + transactionManager = transactionManagerLookup.getTransactionManager(properties); + } + cache.start(); + } + catch (Exception e) { + throw new CacheException(e); + } + } + + public void stop() { + if (cache!=null) { + cache.stop(); + cache.destroy(); + cache=null; + } + } + + public boolean isMinimalPutsEnabledByDefault() { + return true; + } + + static final class TransactionManagerLookupAdaptor implements org.jboss.cache.TransactionManagerLookup { + private final TransactionManagerLookup tml; + private final Properties props; + TransactionManagerLookupAdaptor(TransactionManagerLookup tml, Properties props) { + this.tml=tml; + this.props=props; + } + public TransactionManager getTransactionManager() throws Exception { + return tml.getTransactionManager(props); + } + } + + public org.jboss.cache.TreeCache getUnderlyingCache() { + return cache; + } +} diff --git a/src/org/hibernate/cache/UpdateTimestampsCache.java b/src/org/hibernate/cache/UpdateTimestampsCache.java new file mode 100644 index 0000000000..d45fec29d3 --- /dev/null +++ b/src/org/hibernate/cache/UpdateTimestampsCache.java @@ -0,0 +1,117 @@ +//$Id$ +package org.hibernate.cache; + +import java.io.Serializable; +import java.util.Iterator; +import java.util.Properties; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Settings; + +/** + * Tracks the timestamps of the most recent updates to particular tables. It is + * important that the cache timeout of the underlying cache implementation be set + * to a higher value than the timeouts of any of the query caches. In fact, we + * recommend that the underlying cache not be configured for expiry at all. + * Note, in particular, that an LRU cache expiry policy is never appropriate.
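To make the contract concrete, a hedged sketch of how the engine drives this cache around a table write. The preinvalidate method appears in the class below; the invalidate counterpart is assumed (its body is partly lost in the fragment that follows), so treat the exact call shape as illustrative:

    // illustrative only; spaces are the table names touched by the write
    Serializable[] spaces = new Serializable[] { "CUSTOMER" };
    updateTimestampsCache.preinvalidate( spaces ); // just before executing the updates
    // ... execute SQL, commit the transaction ...
    updateTimestampsCache.invalidate( spaces );    // assumed counterpart, invoked after completion

A cached query result set is then usable only if every space it reads from has a last-update timestamp older than the result set's own timestamp, which is the check the isUpToDate body below performs.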
+ * + * @author Gavin King + * @author Mikheil Kapanadze + */ +public class UpdateTimestampsCache { + + public static final String REGION_NAME = UpdateTimestampsCache.class.getName(); + + private static final Log log = LogFactory.getLog(UpdateTimestampsCache.class); + + private Cache updateTimestamps; + private final String regionName; + + public void clear() throws CacheException { + updateTimestamps.clear(); + } + + public UpdateTimestampsCache(Settings settings, Properties props) throws HibernateException { + String prefix = settings.getCacheRegionPrefix(); + regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME; + log.info( "starting update timestamps cache at region: " + regionName ); + this.updateTimestamps = settings.getCacheProvider().buildCache( regionName, props ); + } + + public synchronized void preinvalidate(Serializable[] spaces) throws CacheException { + //TODO: to handle concurrent writes correctly, this should return a Lock to the client + Long ts = new Long( updateTimestamps.nextTimestamp() + updateTimestamps.getTimeout() ); + for ( int i=0; i= timestamp.longValue() ) { + return false; + } + } + } + return true; + } + + public void destroy() { + try { + updateTimestamps.destroy(); + } + catch (Exception e) { + log.warn("could not destroy UpdateTimestamps cache", e); + } + } + + public Cache getCache() { + return updateTimestamps; + } + + public String getRegionName() { + return regionName; + } + + public String toString() { + return "UpdateTimestampeCache"; + } + +} diff --git a/src/org/hibernate/cache/entry/CacheEntry.java b/src/org/hibernate/cache/entry/CacheEntry.java new file mode 100644 index 0000000000..0f0451611a --- /dev/null +++ b/src/org/hibernate/cache/entry/CacheEntry.java @@ -0,0 +1,143 @@ +//$Id$ +package org.hibernate.cache.entry; + +import java.io.Serializable; + +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.event.EventSource; +import org.hibernate.event.PreLoadEvent; +import org.hibernate.event.PreLoadEventListener; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.ArrayHelper; + +/** + * A cached instance of a persistent class + * + * @author Gavin King + */ +public final class CacheEntry implements Serializable { + + private final Serializable[] disassembledState; + private final String subclass; + private final boolean lazyPropertiesAreUnfetched; + private final Object version; + + public String getSubclass() { + return subclass; + } + + public boolean areLazyPropertiesUnfetched() { + return lazyPropertiesAreUnfetched; + } + + public CacheEntry( + final Object[] state, + final EntityPersister persister, + final boolean unfetched, + final Object version, + final SessionImplementor session, + final Object owner) + throws HibernateException { + //disassembled state gets put in a new array (we write to cache by value!) + this.disassembledState = TypeFactory.disassemble( + state, + persister.getPropertyTypes(), + persister.isLazyPropertiesCacheable() ? 
+ null : persister.getPropertyLaziness(), + session, + owner + ); + subclass = persister.getEntityName(); + lazyPropertiesAreUnfetched = unfetched || !persister.isLazyPropertiesCacheable(); + this.version = version; + } + + public Object getVersion() { + return version; + } + + CacheEntry(Serializable[] state, String subclass, boolean unfetched, Object version) { + this.disassembledState = state; + this.subclass = subclass; + this.lazyPropertiesAreUnfetched = unfetched; + this.version = version; + } + + public Object[] assemble( + final Object instance, + final Serializable id, + final EntityPersister persister, + final Interceptor interceptor, + final EventSource session) + throws HibernateException { + + if ( !persister.getEntityName().equals(subclass) ) { + throw new AssertionFailure("Tried to assemble a different subclass instance"); + } + + return assemble(disassembledState, instance, id, persister, interceptor, session); + + } + + private static Object[] assemble( + final Serializable[] values, + final Object result, + final Serializable id, + final EntityPersister persister, + final Interceptor interceptor, + final EventSource session) + throws HibernateException { + + //assembled state gets put in a new array (we read from cache by value!) + Object[] assembledProps = TypeFactory.assemble( + values, + persister.getPropertyTypes(), + session, result + ); + + //persister.setIdentifier(result, id); //before calling interceptor, for consistency with normal load + + //TODO: reuse the PreLoadEvent + PreLoadEvent preLoadEvent = new PreLoadEvent( session ) + .setEntity(result) + .setState(assembledProps) + .setId(id) + .setPersister(persister); + + PreLoadEventListener[] listeners = session.getListeners().getPreLoadEventListeners(); + for ( int i = 0; i < listeners.length; i++ ) { + listeners[i].onPreLoad(preLoadEvent); + } + + persister.setPropertyValues( + result, + assembledProps, + session.getEntityMode() + ); + + return assembledProps; + } + + public Serializable[] getDisassembledState() { + // todo: this was added to support initializing an entity's EntityEntry snapshot during reattach; + // this should be refactored to instead expose a method to assemble an EntityEntry based on this + // state for return.
+ return disassembledState; + } + + public String toString() { + return "CacheEntry(" + subclass + ')' + + ArrayHelper.toString(disassembledState); + } + +} + + + + + + diff --git a/src/org/hibernate/cache/entry/CacheEntryStructure.java b/src/org/hibernate/cache/entry/CacheEntryStructure.java new file mode 100755 index 0000000000..7dc0f331ba --- /dev/null +++ b/src/org/hibernate/cache/entry/CacheEntryStructure.java @@ -0,0 +1,14 @@ +//$Id$ +package org.hibernate.cache.entry; + +import org.hibernate.engine.SessionFactoryImplementor; + + + +/** + * @author Gavin King + */ +public interface CacheEntryStructure { + public Object structure(Object item); + public Object destructure(Object map, SessionFactoryImplementor factory); +} diff --git a/src/org/hibernate/cache/entry/CollectionCacheEntry.java b/src/org/hibernate/cache/entry/CollectionCacheEntry.java new file mode 100755 index 0000000000..155d922fd9 --- /dev/null +++ b/src/org/hibernate/cache/entry/CollectionCacheEntry.java @@ -0,0 +1,43 @@ +//$Id$ +package org.hibernate.cache.entry; + +import java.io.Serializable; + +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.util.ArrayHelper; + +/** + * @author Gavin King + */ +public class CollectionCacheEntry implements Serializable { + + private final Serializable state; + + public Serializable[] getState() { + //TODO: assumes all collections disassemble to an array! + return (Serializable[]) state; + } + + public CollectionCacheEntry(PersistentCollection collection, CollectionPersister persister) { + this.state = collection.disassemble(persister); + } + + CollectionCacheEntry(Serializable state) { + this.state = state; + } + + public void assemble( + final PersistentCollection collection, + final CollectionPersister persister, + final Object owner + ) { + collection.initializeFromCache(persister, state, owner); + collection.afterInitialize(); + } + + public String toString() { + return "CollectionCacheEntry" + ArrayHelper.toString( getState() ); + } + +} diff --git a/src/org/hibernate/cache/entry/StructuredCacheEntry.java b/src/org/hibernate/cache/entry/StructuredCacheEntry.java new file mode 100755 index 0000000000..596e60dc44 --- /dev/null +++ b/src/org/hibernate/cache/entry/StructuredCacheEntry.java @@ -0,0 +1,48 @@ +//$Id$ +package org.hibernate.cache.entry; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; + +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.persister.entity.EntityPersister; + +/** + * @author Gavin King + */ +public class StructuredCacheEntry implements CacheEntryStructure { + + private EntityPersister persister; + + public StructuredCacheEntry(EntityPersister persister) { + this.persister = persister; + } + + public Object destructure(Object item, SessionFactoryImplementor factory) { + Map map = (Map) item; + boolean lazyPropertiesUnfetched = ( (Boolean) map.get("_lazyPropertiesUnfetched") ).booleanValue(); + String subclass = (String) map.get("_subclass"); + Object version = map.get("_version"); + EntityPersister subclassPersister = factory.getEntityPersister(subclass); + String[] names = subclassPersister.getPropertyNames(); + Serializable[] state = new Serializable[names.length]; + for ( int i=0; i + + + +
    +
    + This package defines formats for disassembled state + kept in the second level cache. +
    +
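To ground the package description, a hedged sketch of how one of these formats is applied; persister, factory, and cacheEntry are assumed to be in hand, and the calls go through the CacheEntryStructure interface defined above:

    CacheEntryStructure structure = new StructuredCacheEntry( persister );
    Object mapForm = structure.structure( cacheEntry );   // a Map form, browsable inside the cache
    CacheEntry back = (CacheEntry) structure.destructure( mapForm, factory ); // factory: SessionFactoryImplementor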
    + + diff --git a/src/org/hibernate/cache/package.html b/src/org/hibernate/cache/package.html new file mode 100755 index 0000000000..b89c4996a5 --- /dev/null +++ b/src/org/hibernate/cache/package.html @@ -0,0 +1,16 @@ + + + + +
    +
    + This package defines APIs and implementations for the + second-level cache and query cache. +
    +
    +
    +
    + Cache abstracts the underlying cache implementation. + CacheConcurrencyStrategy abstracts various strategies + for maintaining consistency between the cache and database. + QueryCache abstracts the query result set cache. +
    +
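To make the Cache contract just described concrete, a hedged sketch of a trivial single-VM region; the method set mirrors the implementations in this patch, Timestamper is the utility defined above, and the class is illustrative rather than suitable for clustering:

    import java.util.Hashtable;
    import java.util.Map;

    public class SimpleMapCache implements Cache {
    	private final Map data = new Hashtable();
    	private final String regionName;

    	public SimpleMapCache(String regionName) { this.regionName = regionName; }

    	public Object get(Object key) { return data.get( key ); }
    	public Object read(Object key) { return data.get( key ); }
    	public void put(Object key, Object value) { data.put( key, value ); }
    	public void update(Object key, Object value) { data.put( key, value ); }
    	public void remove(Object key) { data.remove( key ); }
    	public void clear() { data.clear(); }
    	public void destroy() { data.clear(); }
    	public void lock(Object key) { } // single VM: nothing to lock
    	public void unlock(Object key) { }
    	public long nextTimestamp() { return Timestamper.next(); }
    	public int getTimeout() { return Timestamper.ONE_MS * 60000; } // roughly sixty seconds
    	public String getRegionName() { return regionName; }
    	public long getSizeInMemory() { return -1; }
    	public long getElementCountInMemory() { return data.size(); }
    	public long getElementCountOnDisk() { return 0; }
    	public Map toMap() { return data; }
    }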
    + + diff --git a/src/org/hibernate/cfg/CollectionSecondPass.java b/src/org/hibernate/cfg/CollectionSecondPass.java new file mode 100644 index 0000000000..d150e2c9ac --- /dev/null +++ b/src/org/hibernate/cfg/CollectionSecondPass.java @@ -0,0 +1,73 @@ +//$Id$ +package org.hibernate.cfg; + +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.MappingException; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.IndexedCollection; +import org.hibernate.mapping.OneToMany; +import org.hibernate.mapping.Selectable; +import org.hibernate.mapping.Value; + +/** + * Collection second pass + * + * @author Emmanuel Bernard + */ +public abstract class CollectionSecondPass implements SecondPass { + private static Log log = LogFactory.getLog( CollectionSecondPass.class ); + Mappings mappings; + Collection collection; + private Map localInheritedMetas; + + public CollectionSecondPass(Mappings mappings, Collection collection, java.util.Map inheritedMetas) { + this.collection = collection; + this.mappings = mappings; + this.localInheritedMetas = inheritedMetas; + } + + public CollectionSecondPass(Mappings mappings, Collection collection) { + this(mappings, collection, Collections.EMPTY_MAP); + } + + public void doSecondPass(java.util.Map persistentClasses) + throws MappingException { + if ( log.isDebugEnabled() ) + log.debug( "Second pass for collection: " + collection.getRole() ); + + secondPass( persistentClasses, localInheritedMetas ); // using local since the inheritedMetas at this point is not the correct map since it is always the empty map + collection.createAllKeys(); + + if ( log.isDebugEnabled() ) { + String msg = "Mapped collection key: " + columns( collection.getKey() ); + if ( collection.isIndexed() ) + msg += ", index: " + columns( ( (IndexedCollection) collection ).getIndex() ); + if ( collection.isOneToMany() ) { + msg += ", one-to-many: " + + ( (OneToMany) collection.getElement() ).getReferencedEntityName(); + } + else { + msg += ", element: " + columns( collection.getElement() ); + } + log.debug( msg ); + } + } + + abstract public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas) + throws MappingException; + + private static String columns(Value val) { + StringBuffer columns = new StringBuffer(); + Iterator iter = val.getColumnIterator(); + while ( iter.hasNext() ) { + columns.append( ( (Selectable) iter.next() ).getText() ); + if ( iter.hasNext() ) columns.append( ", " ); + } + return columns.toString(); + } +} diff --git a/src/org/hibernate/cfg/Configuration.java b/src/org/hibernate/cfg/Configuration.java new file mode 100644 index 0000000000..82b57db43b --- /dev/null +++ b/src/org/hibernate/cfg/Configuration.java @@ -0,0 +1,2103 @@ +//$Id$ +package org.hibernate.cfg; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.Serializable; +import java.io.StringReader; +import java.lang.reflect.Array; +import java.net.URL; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.TreeMap; 
+import java.util.jar.JarFile; +import java.util.zip.ZipEntry; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.dom4j.Attribute; +import org.dom4j.DocumentException; +import org.dom4j.Element; +import org.hibernate.EmptyInterceptor; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.InvalidMappingException; +import org.hibernate.MappingException; +import org.hibernate.MappingNotFoundException; +import org.hibernate.SessionFactory; +import org.hibernate.proxy.EntityNotFoundDelegate; +import org.hibernate.dialect.Dialect; +import org.hibernate.dialect.MySQLDialect; +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.engine.FilterDefinition; +import org.hibernate.engine.Mapping; +import org.hibernate.event.AutoFlushEventListener; +import org.hibernate.event.DeleteEventListener; +import org.hibernate.event.DirtyCheckEventListener; +import org.hibernate.event.EventListeners; +import org.hibernate.event.EvictEventListener; +import org.hibernate.event.FlushEntityEventListener; +import org.hibernate.event.FlushEventListener; +import org.hibernate.event.InitializeCollectionEventListener; +import org.hibernate.event.LoadEventListener; +import org.hibernate.event.LockEventListener; +import org.hibernate.event.MergeEventListener; +import org.hibernate.event.PersistEventListener; +import org.hibernate.event.PostDeleteEventListener; +import org.hibernate.event.PostInsertEventListener; +import org.hibernate.event.PostLoadEventListener; +import org.hibernate.event.PostUpdateEventListener; +import org.hibernate.event.PreDeleteEventListener; +import org.hibernate.event.PreInsertEventListener; +import org.hibernate.event.PreLoadEventListener; +import org.hibernate.event.PreUpdateEventListener; +import org.hibernate.event.RefreshEventListener; +import org.hibernate.event.ReplicateEventListener; +import org.hibernate.event.SaveOrUpdateEventListener; +import org.hibernate.id.IdentifierGenerator; +import org.hibernate.id.PersistentIdentifierGenerator; +import org.hibernate.impl.SessionFactoryImpl; +import org.hibernate.mapping.AuxiliaryDatabaseObject; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.ForeignKey; +import org.hibernate.mapping.IdentifierCollection; +import org.hibernate.mapping.Index; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.mapping.Property; +import org.hibernate.mapping.RootClass; +import org.hibernate.mapping.SimpleValue; +import org.hibernate.mapping.Table; +import org.hibernate.mapping.UniqueKey; +import org.hibernate.secure.JACCConfiguration; +import org.hibernate.tool.hbm2ddl.DatabaseMetadata; +import org.hibernate.tool.hbm2ddl.TableMetadata; +import org.hibernate.type.SerializationException; +import org.hibernate.type.Type; +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.CollectionHelper; +import org.hibernate.util.ConfigHelper; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.SerializationHelper; +import org.hibernate.util.StringHelper; +import org.hibernate.util.XMLHelper; +import org.hibernate.util.PropertiesHelper; +import org.w3c.dom.Document; +import org.xml.sax.EntityResolver; +import org.xml.sax.InputSource; + +/** + * An instance of Configuration allows the application + * to specify properties and mapping documents to be used when + * creating a SessionFactory. 
Usually an application will create + * a single Configuration, build a single instance of + * SessionFactory and then instantiate Sessions in + * threads servicing client requests. The Configuration is meant + * only as an initialization-time object. SessionFactorys are + * immutable and do not retain any association back to the + * Configuration.
    + *
    + * A new Configuration will use the properties specified in + * hibernate.properties by default. + * + * @author Gavin King + * @see org.hibernate.SessionFactory + */ +public class Configuration implements Serializable { + + private static Log log = LogFactory.getLog( Configuration.class ); + + protected Map classes; + protected Map imports; + protected Map collections; + protected Map tables; + protected List auxiliaryDatabaseObjects; + protected Map sqlFunctions; + protected Map namedQueries; + protected Map namedSqlQueries; + /** + * Map result set name, result set description + */ + protected Map sqlResultSetMappings; + protected Map filterDefinitions; + protected List secondPasses; + protected List propertyReferences; +// protected List extendsQueue; + protected Map extendsQueue; + protected Map tableNameBinding; + protected Map columnNameBindingPerTable; + private Interceptor interceptor; + private Properties properties; + private EntityResolver entityResolver; + private EntityNotFoundDelegate entityNotFoundDelegate; + + protected transient XMLHelper xmlHelper; + protected transient Map typeDefs; + + protected NamingStrategy namingStrategy; + + private EventListeners eventListeners; + + protected final SettingsFactory settingsFactory; + + protected void reset() { + classes = new HashMap(); + imports = new HashMap(); + collections = new HashMap(); + tables = new TreeMap(); + namedQueries = new HashMap(); + namedSqlQueries = new HashMap(); + sqlResultSetMappings = new HashMap(); + xmlHelper = new XMLHelper(); + typeDefs = new HashMap(); + propertyReferences = new ArrayList(); + secondPasses = new ArrayList(); + interceptor = EmptyInterceptor.INSTANCE; + properties = Environment.getProperties(); + entityResolver = XMLHelper.DEFAULT_DTD_RESOLVER; + eventListeners = new EventListeners(); + filterDefinitions = new HashMap(); +// extendsQueue = new ArrayList(); + extendsQueue = new HashMap(); + auxiliaryDatabaseObjects = new ArrayList(); + tableNameBinding = new HashMap(); + columnNameBindingPerTable = new HashMap(); + namingStrategy = DefaultNamingStrategy.INSTANCE; + sqlFunctions = new HashMap(); + } + + private transient Mapping mapping = buildMapping(); + + + + protected Configuration(SettingsFactory settingsFactory) { + this.settingsFactory = settingsFactory; + reset(); + } + + public Configuration() { + this( new SettingsFactory() ); + } + + /** + * Iterate the entity mappings + * + * @return Iterator of the entity mappings currently contained in the configuration. + */ + public Iterator getClassMappings() { + return classes.values().iterator(); + } + + /** + * Iterate the collection mappings + * + * @return Iterator of the collection mappings currently contained in the configuration. + */ + public Iterator getCollectionMappings() { + return collections.values().iterator(); + } + + /** + * Iterate the table mappings + * + * @return Iterator of the table mappings currently contained in the configuration. + */ + public Iterator getTableMappings() { + return tables.values().iterator(); + } + + /** + * Get the mapping for a particular entity + * + * @param entityName An entity name. 
+ * @return the entity mapping information + */ + public PersistentClass getClassMapping(String entityName) { + return (PersistentClass) classes.get( entityName ); + } + + /** + * Get the mapping for a particular collection role + * + * @param role a collection role + * @return The collection mapping information + */ + public Collection getCollectionMapping(String role) { + return (Collection) collections.get( role ); + } + + /** + * Set a custom entity resolver. This entity resolver must be + * set before any addXXX() call. + * Default value is {@link org.hibernate.util.DTDEntityResolver} + * + * @param entityResolver entity resolver to use + */ + public void setEntityResolver(EntityResolver entityResolver) { + this.entityResolver = entityResolver; + } + + public EntityResolver getEntityResolver() { + return entityResolver; + } + + /** + * Retrieve the user-supplied delegate to handle non-existent entity + * scenarios. May be null. + * + * @return The user-supplied delegate + */ + public EntityNotFoundDelegate getEntityNotFoundDelegate() { + return entityNotFoundDelegate; + } + + /** + * Specify a user-supplied delegate to be used to handle scenarios where an entity could not be + * located by the specified id. This is mainly intended for EJB3 implementations to be able to + * control how proxy initialization errors should be handled... + * + * @param entityNotFoundDelegate The delegate to use + */ + public void setEntityNotFoundDelegate(EntityNotFoundDelegate entityNotFoundDelegate) { + this.entityNotFoundDelegate = entityNotFoundDelegate; + } + + /** + * Read mappings from a particular XML file + * + * @param xmlFile a path to a file + * @return this (for method chaining purposes) + * @throws org.hibernate.MappingException Indicates inability to locate or parse + * the specified mapping file. + * @see #addFile(java.io.File) + */ + public Configuration addFile(String xmlFile) throws MappingException { + return addFile( new File( xmlFile ) ); + } + + /** + * Read mappings from a particular XML file + * + * @param xmlFile a path to a file + * @return this (for method chaining purposes) + * @throws org.hibernate.MappingException Indicates inability to locate or parse + * the specified mapping file. + */ + public Configuration addFile(File xmlFile) throws MappingException { + log.info( "Reading mappings from file: " + xmlFile.getPath() ); + if ( !xmlFile.exists() ) { + throw new MappingNotFoundException( "file", xmlFile.toString() ); + } + try { + List errors = new ArrayList(); + org.dom4j.Document doc = xmlHelper.createSAXReader( xmlFile.toString(), errors, entityResolver ).read( xmlFile ); + if ( errors.size() != 0 ) { + throw new InvalidMappingException( "file", xmlFile.toString(), ( Throwable ) errors.get( 0 ) ); + } + add( doc ); + return this; + } + catch ( InvalidMappingException e ) { + throw e; + } + catch ( MappingNotFoundException e ) { + throw e; + } + catch ( Exception e ) { + throw new InvalidMappingException( "file", xmlFile.toString(), e ); + } + } + + /** + * Add a cached mapping file. A cached file is a serialized representation + * of the DOM structure of a particular mapping. It is saved from a previous + * call as a file with the name xmlFile + ".bin" where xmlFile is + * the name of the original mapping file. + *

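+ * For example (an illustrative sketch only): addCacheableFile( new File( "Foo.hbm.xml" ) )
+ * reads and maintains a serialized Foo.hbm.xml.bin next to the original mapping file.
+ *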
    + * If a cached xmlFile + ".bin" exists and is newer than + * xmlFile the ".bin" file will be read directly. Otherwise + * xmlFile is read and then serialized to xmlFile + ".bin" for use + * the next time. + * + * @param xmlFile The cacheable mapping file to be added. + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems reading the cached file or processing + * the non-cached file. + */ + public Configuration addCacheableFile(File xmlFile) throws MappingException { + try { + File cachedFile = new File( xmlFile.getAbsolutePath() + ".bin" ); + org.dom4j.Document doc = null; + + final boolean useCachedFile = xmlFile.exists() && + cachedFile.exists() && + xmlFile.lastModified() < cachedFile.lastModified(); + + if ( useCachedFile ) { + try { + log.info( "Reading mappings from cache file: " + cachedFile ); + doc = ( org.dom4j.Document ) SerializationHelper.deserialize( new FileInputStream( cachedFile ) ); + } + catch ( SerializationException e ) { + log.warn( "Could not deserialize cache file: " + cachedFile.getPath(), e ); + } + catch ( FileNotFoundException e ) { + log.warn( "I/O reported cached file could not be found : " + cachedFile.getPath(), e ); + } + } + + // if doc is null, then for whatever reason, the cached file cannot be used... + if ( doc == null ) { + if ( !xmlFile.exists() ) { + throw new MappingNotFoundException( "file", xmlFile.toString() ); + } + + log.info( "Reading mappings from file: " + xmlFile ); + List errors = new ArrayList(); + try { + doc = xmlHelper.createSAXReader( xmlFile.getAbsolutePath(), errors, entityResolver ).read( xmlFile ); + if ( errors.size() != 0 ) { + throw new MappingException( "invalid mapping", ( Throwable ) errors.get( 0 ) ); + } + } + catch( DocumentException e){ + throw new MappingException( "invalid mapping", e ); + } + + try { + log.debug( "Writing cache file for: " + xmlFile + " to: " + cachedFile ); + SerializationHelper.serialize( ( Serializable ) doc, new FileOutputStream( cachedFile ) ); + } + catch ( SerializationException e ) { + log.warn( "Could not write cached file: " + cachedFile, e ); + } + catch ( FileNotFoundException e ) { + log.warn( "I/O reported error writing cached file : " + cachedFile.getPath(), e ); + } + } + + add( doc ); + return this; + + } + catch ( InvalidMappingException e ) { + throw e; + } + catch ( MappingNotFoundException e ) { + throw e; + } + catch ( Exception e ) { + throw new InvalidMappingException( "file", xmlFile.toString(), e ); + } + } + + /** + * Add a cacheable mapping file. + * + * @param xmlFile The name of the file to be added. This must be in a form + * useable to simply construct a {@link java.io.File} instance. + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems reading the cached file or processing + * the non-cached file. 
+ * @see #addCacheableFile(java.io.File) + */ + public Configuration addCacheableFile(String xmlFile) throws MappingException { + return addCacheableFile( new File( xmlFile ) ); + } + + + /** + * Read mappings from a String + * + * @param xml an XML string + * @return this (for method chaining purposes) + * @throws org.hibernate.MappingException Indicates problems parsing the + * given XML string + */ + public Configuration addXML(String xml) throws MappingException { + if ( log.isDebugEnabled() ) { + log.debug( "Mapping XML:\n" + xml ); + } + try { + List errors = new ArrayList(); + org.dom4j.Document doc = xmlHelper.createSAXReader( "XML String", errors, entityResolver ) + .read( new StringReader( xml ) ); + if ( errors.size() != 0 ) { + throw new MappingException( "invalid mapping", (Throwable) errors.get( 0 ) ); + } + add( doc ); + } + catch (DocumentException e) { + throw new MappingException( "Could not parse mapping document in XML string", e ); + } + return this; + } + + /** + * Read mappings from a URL + * + * @param url The url for the mapping document to be read. + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems reading the URL or processing + * the mapping document. + */ + public Configuration addURL(URL url) throws MappingException { + if ( log.isDebugEnabled() ) { + log.debug( "Reading mapping document from URL: " + url.toExternalForm() ); + } + try { + addInputStream( url.openStream() ); + } + catch ( InvalidMappingException e ) { + throw new InvalidMappingException( "URL", url.toExternalForm(), e.getCause() ); + } + catch (Exception e) { + throw new InvalidMappingException( "URL", url.toExternalForm(), e ); + } + return this; + } + + /** + * Read mappings from a DOM Document + * + * @param doc The DOM document + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems reading the DOM or processing + * the mapping document. + */ + public Configuration addDocument(Document doc) throws MappingException { + if ( log.isDebugEnabled() ) { + log.debug( "Mapping document:\n" + doc ); + } + add( xmlHelper.createDOMReader().read( doc ) ); + return this; + } + + /** + * Read mappings from an {@link java.io.InputStream}. + * + * @param xmlInputStream The input stream containing a DOM. + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems reading the stream, or + * processing the contained mapping document. + */ + public Configuration addInputStream(InputStream xmlInputStream) throws MappingException { + try { + List errors = new ArrayList(); + org.dom4j.Document doc = xmlHelper.createSAXReader( "XML InputStream", errors, entityResolver ) + .read( new InputSource( xmlInputStream ) ); + if ( errors.size() != 0 ) { + throw new InvalidMappingException( "invalid mapping", null, (Throwable) errors.get( 0 ) ); + } + add( doc ); + return this; + } + catch (DocumentException e) { + throw new InvalidMappingException( "input stream", null, e ); + } + finally { + try { + xmlInputStream.close(); + } + catch (IOException ioe) { + log.warn( "Could not close input stream", ioe ); + } + } + } + + /** + * Read mappings as an application resource (i.e. classpath lookup). + * + * @param resourceName The resource name + * @param classLoader The class loader to use. + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems locating the resource or + * processing the contained mapping document.
+ */ + public Configuration addResource(String resourceName, ClassLoader classLoader) throws MappingException { + log.info( "Reading mappings from resource: " + resourceName ); + InputStream rsrc = classLoader.getResourceAsStream( resourceName ); + if ( rsrc == null ) { + throw new MappingNotFoundException( "resource", resourceName ); + } + try { + return addInputStream( rsrc ); + } + catch (MappingException me) { + throw new InvalidMappingException( "resource", resourceName, me ); + } + } + + /** + * Read mappings as an application resource (i.e. classpath lookup), + * trying different classloaders. + * + * @param resourceName The resource name + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems locating the resource or + * processing the contained mapping document. + */ + public Configuration addResource(String resourceName) throws MappingException { + log.info( "Reading mappings from resource: " + resourceName ); + ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); + InputStream rsrc = null; + if (contextClassLoader!=null) { + rsrc = contextClassLoader.getResourceAsStream( resourceName ); + } + if ( rsrc == null ) { + rsrc = Environment.class.getClassLoader().getResourceAsStream( resourceName ); + } + if ( rsrc == null ) { + throw new MappingNotFoundException( "resource", resourceName ); + } + try { + return addInputStream( rsrc ); + } + catch (MappingException me) { + throw new InvalidMappingException( "resource", resourceName, me ); + } + } + + /** + * Read a mapping as an application resource using the convention that a class + * named foo.bar.Foo is mapped by a file foo/bar/Foo.hbm.xml + * which can be resolved as a classpath resource. + * + * @param persistentClass The mapped class + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems locating the resource or + * processing the contained mapping document. + */ + public Configuration addClass(Class persistentClass) throws MappingException { + String mappingResourceName = persistentClass.getName().replace( '.', '/' ) + ".hbm.xml"; + log.info( "Reading mappings from resource: " + mappingResourceName ); + return addResource( mappingResourceName, persistentClass.getClassLoader() ); + } + + /** + * Read all mappings from a jar file + *

    + * Assumes that any file named *.hbm.xml is a mapping document. + * + * @param jar a jar file + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems reading the jar file or + * processing the contained mapping documents. + */ + public Configuration addJar(File jar) throws MappingException { + log.info( "Searching for mapping documents in jar: " + jar.getName() ); + JarFile jarFile = null; + try { + try { + jarFile = new JarFile( jar ); + } + catch (IOException ioe) { + throw new InvalidMappingException( + "Could not read mapping documents from jar: " + jar.getName(), "jar", jar.getName(), + ioe + ); + } + Enumeration jarEntries = jarFile.entries(); + while ( jarEntries.hasMoreElements() ) { + ZipEntry ze = (ZipEntry) jarEntries.nextElement(); + if ( ze.getName().endsWith( ".hbm.xml" ) ) { + log.info( "Found mapping document in jar: " + ze.getName() ); + try { + addInputStream( jarFile.getInputStream( ze ) ); + } + catch (Exception e) { + throw new InvalidMappingException( + "Could not read mapping documents from jar: " + jar.getName(), + "jar", + jar.getName(), + e + ); + } + } + } + } + finally { + try { + if ( jarFile != null ) { + jarFile.close(); + } + } + catch (IOException ioe) { + log.error("could not close jar", ioe); + } + } + + return this; + } + + /** + * Read all mapping documents from a directory tree. + *

+ * Assumes that any file named *.hbm.xml is a mapping document. + * + * @param dir The directory + * @return this (for method chaining purposes) + * @throws MappingException Indicates problems reading the directory or + * processing the contained mapping documents. + */ + public Configuration addDirectory(File dir) throws MappingException { + File[] files = dir.listFiles(); + for ( int i = 0; i < files.length ; i++ ) { + if ( files[i].isDirectory() ) { + addDirectory( files[i] ); + } + else if ( files[i].getName().endsWith( ".hbm.xml" ) ) { + addFile( files[i] ); + } + } + return this; + } + + protected void add(org.dom4j.Document doc) throws MappingException { + HbmBinder.bindRoot( doc, createMappings(), CollectionHelper.EMPTY_MAP ); + } + + /** + * Create a new Mappings to add class and collection + * mappings to. + */ + public Mappings createMappings() { + return new Mappings( + classes, + collections, + tables, + namedQueries, + namedSqlQueries, + sqlResultSetMappings, + imports, + secondPasses, + propertyReferences, + namingStrategy, + typeDefs, + filterDefinitions, + extendsQueue, + auxiliaryDatabaseObjects, + tableNameBinding, + columnNameBindingPerTable + ); + } + + + private Iterator iterateGenerators(Dialect dialect) throws MappingException { + + TreeMap generators = new TreeMap(); + String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG ); + String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA ); + + Iterator iter = classes.values().iterator(); + while ( iter.hasNext() ) { + PersistentClass pc = (PersistentClass) iter.next(); + + if ( !pc.isInherited() ) { + + IdentifierGenerator ig = pc.getIdentifier() + .createIdentifierGenerator( + dialect, + defaultCatalog, + defaultSchema, + (RootClass) pc + ); + + if ( ig instanceof PersistentIdentifierGenerator ) { + generators.put( ( (PersistentIdentifierGenerator) ig ).generatorKey(), ig ); + } + + } + } + + iter = collections.values().iterator(); + while ( iter.hasNext() ) { + Collection collection = (Collection) iter.next(); + + if ( collection.isIdentified() ) { + + IdentifierGenerator ig = ( (IdentifierCollection) collection ).getIdentifier() + .createIdentifierGenerator( + dialect, + defaultCatalog, + defaultSchema, + null + ); + + if ( ig instanceof PersistentIdentifierGenerator ) { + generators.put( ( (PersistentIdentifierGenerator) ig ).generatorKey(), ig ); + } + + } + } + + return generators.values().iterator(); + } + + /** + * Generate DDL for dropping tables + * + * @see org.hibernate.tool.hbm2ddl.SchemaExport + */ + public String[] generateDropSchemaScript(Dialect dialect) throws HibernateException { + + secondPassCompile(); + + String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG ); + String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA ); + + ArrayList script = new ArrayList( 50 ); + + // drop them in reverse order in case db needs it done that way...
+ ListIterator itr = auxiliaryDatabaseObjects.listIterator( auxiliaryDatabaseObjects.size() ); + while ( itr.hasPrevious() ) { + AuxiliaryDatabaseObject object = (AuxiliaryDatabaseObject) itr.previous(); + if ( object.appliesToDialect( dialect ) ) { + script.add( object.sqlDropString( dialect, defaultCatalog, defaultSchema ) ); + } + } + + if ( dialect.dropConstraints() ) { + Iterator iter = getTableMappings(); + while ( iter.hasNext() ) { + Table table = (Table) iter.next(); + if ( table.isPhysicalTable() ) { + Iterator subIter = table.getForeignKeyIterator(); + while ( subIter.hasNext() ) { + ForeignKey fk = (ForeignKey) subIter.next(); + if ( fk.isPhysicalConstraint() ) { + script.add( + fk.sqlDropString( + dialect, + defaultCatalog, + defaultSchema + ) + ); + } + } + } + } + } + + + Iterator iter = getTableMappings(); + while ( iter.hasNext() ) { + + Table table = (Table) iter.next(); + if ( table.isPhysicalTable() ) { + + /*Iterator subIter = table.getIndexIterator(); + while ( subIter.hasNext() ) { + Index index = (Index) subIter.next(); + if ( !index.isForeignKey() || !dialect.hasImplicitIndexForForeignKey() ) { + script.add( index.sqlDropString(dialect) ); + } + }*/ + + script.add( + table.sqlDropString( + dialect, + defaultCatalog, + defaultSchema + ) + ); + + } + + } + + iter = iterateGenerators( dialect ); + while ( iter.hasNext() ) { + String[] lines = ( (PersistentIdentifierGenerator) iter.next() ).sqlDropStrings( dialect ); + for ( int i = 0; i < lines.length ; i++ ) { + script.add( lines[i] ); + } + } + + return ArrayHelper.toStringArray( script ); + } + + /** + * Generate DDL for creating tables + * + * @see org.hibernate.tool.hbm2ddl.SchemaExport + */ + public String[] generateSchemaCreationScript(Dialect dialect) throws HibernateException { + secondPassCompile(); + + ArrayList script = new ArrayList( 50 ); + String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG ); + String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA ); + + Iterator iter = getTableMappings(); + while ( iter.hasNext() ) { + Table table = (Table) iter.next(); + if ( table.isPhysicalTable() ) { + script.add( + table.sqlCreateString( + dialect, + mapping, + defaultCatalog, + defaultSchema + ) + ); + Iterator comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema ); + while ( comments.hasNext() ) { + script.add( comments.next() ); + } + } + } + + iter = getTableMappings(); + while ( iter.hasNext() ) { + Table table = (Table) iter.next(); + if ( table.isPhysicalTable() ) { + + if ( !dialect.supportsUniqueConstraintInCreateAlterTable() ) { + Iterator subIter = table.getUniqueKeyIterator(); + while ( subIter.hasNext() ) { + UniqueKey uk = (UniqueKey) subIter.next(); + String constraintString = uk.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema ); + if (constraintString != null) script.add( constraintString ); + } + } + + + Iterator subIter = table.getIndexIterator(); + while ( subIter.hasNext() ) { + Index index = (Index) subIter.next(); + script.add( + index.sqlCreateString( + dialect, + mapping, + defaultCatalog, + defaultSchema + ) + ); + } + + if ( dialect.hasAlterTable() ) { + subIter = table.getForeignKeyIterator(); + while ( subIter.hasNext() ) { + ForeignKey fk = (ForeignKey) subIter.next(); + if ( fk.isPhysicalConstraint() ) { + script.add( + fk.sqlCreateString( + dialect, mapping, + defaultCatalog, + defaultSchema + ) + ); + } + } + } + + } + } + + iter = iterateGenerators( dialect ); + while ( iter.hasNext() ) { + String[] 
lines = ( (PersistentIdentifierGenerator) iter.next() ).sqlCreateStrings( dialect ); + for ( int i = 0; i < lines.length ; i++ ) { + script.add( lines[i] ); + } + } + + Iterator itr = auxiliaryDatabaseObjects.iterator(); + while ( itr.hasNext() ) { + AuxiliaryDatabaseObject object = (AuxiliaryDatabaseObject) itr.next(); + if ( object.appliesToDialect( dialect ) ) { + script.add( object.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema ) ); + } + } + + return ArrayHelper.toStringArray( script ); + } + + /** + * Generate DDL for altering tables + * + * @see org.hibernate.tool.hbm2ddl.SchemaUpdate + */ + public String[] generateSchemaUpdateScript(Dialect dialect, DatabaseMetadata databaseMetadata) + throws HibernateException { + secondPassCompile(); + + String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG ); + String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA ); + + ArrayList script = new ArrayList( 50 ); + + Iterator iter = getTableMappings(); + while ( iter.hasNext() ) { + Table table = (Table) iter.next(); + if ( table.isPhysicalTable() ) { + + TableMetadata tableInfo = databaseMetadata.getTableMetadata( + table.getName(), + ( table.getSchema() == null ) ? defaultSchema : table.getSchema(), + ( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog(), + table.isQuoted() + + ); + if ( tableInfo == null ) { + script.add( + table.sqlCreateString( + dialect, + mapping, + defaultCatalog, + defaultSchema + ) + ); + } + else { + Iterator subiter = table.sqlAlterStrings( + dialect, + mapping, + tableInfo, + defaultCatalog, + defaultSchema + ); + while ( subiter.hasNext() ) { + script.add( subiter.next() ); + } + } + + Iterator comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema ); + while ( comments.hasNext() ) { + script.add( comments.next() ); + } + + } + } + + iter = getTableMappings(); + while ( iter.hasNext() ) { + Table table = (Table) iter.next(); + if ( table.isPhysicalTable() ) { + + TableMetadata tableInfo = databaseMetadata.getTableMetadata( + table.getName(), + table.getSchema(), + table.getCatalog(), + table.isQuoted() + ); + + if ( dialect.hasAlterTable() ) { + Iterator subIter = table.getForeignKeyIterator(); + while ( subIter.hasNext() ) { + ForeignKey fk = (ForeignKey) subIter.next(); + if ( fk.isPhysicalConstraint() ) { + boolean create = tableInfo == null || ( + tableInfo.getForeignKeyMetadata( fk.getName() ) == null && ( + //Icky workaround for MySQL bug: + !( dialect instanceof MySQLDialect ) || + tableInfo.getIndexMetadata( fk.getName() ) == null + ) + ); + if ( create ) { + script.add( + fk.sqlCreateString( + dialect, + mapping, + defaultCatalog, + defaultSchema + ) + ); + } + } + } + } + + } + + /*//broken, 'cos we don't generate these with names in SchemaExport + subIter = table.getIndexIterator(); + while ( subIter.hasNext() ) { + Index index = (Index) subIter.next(); + if ( !index.isForeignKey() || !dialect.hasImplicitIndexForForeignKey() ) { + if ( tableInfo==null || tableInfo.getIndexMetadata( index.getFilterName() ) == null ) { + script.add( index.sqlCreateString(dialect, mapping) ); + } + } + } + //broken, 'cos we don't generate these with names in SchemaExport + subIter = table.getUniqueKeyIterator(); + while ( subIter.hasNext() ) { + UniqueKey uk = (UniqueKey) subIter.next(); + if ( tableInfo==null || tableInfo.getIndexMetadata( uk.getFilterName() ) == null ) { + script.add( uk.sqlCreateString(dialect, mapping) ); + } + }*/ + } + + iter = iterateGenerators( dialect ); + 
while ( iter.hasNext() ) { + PersistentIdentifierGenerator generator = (PersistentIdentifierGenerator) iter.next(); + Object key = generator.generatorKey(); + if ( !databaseMetadata.isSequence( key ) && !databaseMetadata.isTable( key ) ) { + String[] lines = generator.sqlCreateStrings( dialect ); + for ( int i = 0; i < lines.length ; i++ ) { + script.add( lines[i] ); + } + } + } + + return ArrayHelper.toStringArray( script ); + } + + public void validateSchema(Dialect dialect, DatabaseMetadata databaseMetadata) + throws HibernateException { + secondPassCompile(); + + String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG ); + String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA ); + + Iterator iter = getTableMappings(); + while ( iter.hasNext() ) { + Table table = (Table) iter.next(); + if ( table.isPhysicalTable() ) { + + + TableMetadata tableInfo = databaseMetadata.getTableMetadata( + table.getName(), + ( table.getSchema() == null ) ? defaultSchema : table.getSchema(), + ( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog(), + table.isQuoted()); + if ( tableInfo == null ) { + throw new HibernateException( "Missing table: " + table.getName() ); + } + else { + table.validateColumns( dialect, mapping, tableInfo ); + } + + } + } + + iter = iterateGenerators( dialect ); + while ( iter.hasNext() ) { + PersistentIdentifierGenerator generator = (PersistentIdentifierGenerator) iter.next(); + Object key = generator.generatorKey(); + if ( !databaseMetadata.isSequence( key ) && !databaseMetadata.isTable( key ) ) { + throw new HibernateException( "Missing sequence or table: " + key ); + } + } + } + + private void validate() throws MappingException { + Iterator iter = classes.values().iterator(); + while ( iter.hasNext() ) { + ( (PersistentClass) iter.next() ).validate( mapping ); + } + iter = collections.values().iterator(); + while ( iter.hasNext() ) { + ( (Collection) iter.next() ).validate( mapping ); + } + } + + /** + * Call this to ensure the mappings are fully compiled/built. Useful to ensure getting + * access to all information in the metamodel when calling e.g. getClassMappings(). + */ + public void buildMappings() { + secondPassCompile(); + } + + // This method may be called many times!! + protected void secondPassCompile() throws MappingException { + log.debug( "processing extends queue" ); + + processExtendsQueue(); + + log.debug( "processing collection mappings" ); + + Iterator iter = secondPasses.iterator(); + while ( iter.hasNext() ) { + SecondPass sp = (SecondPass) iter.next(); + if ( !
(sp instanceof QuerySecondPass) ) { + sp.doSecondPass( classes ); + iter.remove(); + } + } + + log.debug( "processing native query and ResultSetMapping mappings" ); + iter = secondPasses.iterator(); + while ( iter.hasNext() ) { + SecondPass sp = (SecondPass) iter.next(); + sp.doSecondPass( classes ); + iter.remove(); + } + + log.debug( "processing association property references" ); + + iter = propertyReferences.iterator(); + while ( iter.hasNext() ) { + Mappings.PropertyReference upr = (Mappings.PropertyReference) iter.next(); + + PersistentClass clazz = getClassMapping( upr.referencedClass ); + if ( clazz == null ) { + throw new MappingException( + "property-ref to unmapped class: " + + upr.referencedClass + ); + } + + Property prop = clazz.getReferencedProperty( upr.propertyName ); + if ( upr.unique ) { + ( (SimpleValue) prop.getValue() ).setAlternateUniqueKey( true ); + } + } + + //TODO: Somehow add the newly created foreign keys to the internal collection + + log.debug( "processing foreign key constraints" ); + + iter = getTableMappings(); + Set done = new HashSet(); + while ( iter.hasNext() ) { + secondPassCompileForeignKeys( (Table) iter.next(), done ); + } + + } + + /** + * Try to empty the extends queue. + */ + private void processExtendsQueue() { + // todo : would love to have this work on a notification basis + // where the successful binding of an entity/subclass would + // emit a notification which the extendsQueue entries could + // react to... + org.dom4j.Document document = findPossibleExtends(); + while ( document != null ) { + add( document ); + document = findPossibleExtends(); + } + + if ( extendsQueue.size() > 0 ) { +// Iterator iterator = extendsQueue.iterator(); + Iterator iterator = extendsQueue.keySet().iterator(); + StringBuffer buf = new StringBuffer( "Following superclasses referenced in extends not found: " ); + while ( iterator.hasNext() ) { + final ExtendsQueueEntry entry = ( ExtendsQueueEntry ) iterator.next(); + buf.append( entry.getExplicitName() ); + if ( entry.getMappingPackage() != null ) { + buf.append( "[" ).append( entry.getMappingPackage() ).append( "]" ); + } + if ( iterator.hasNext() ) { + buf.append( "," ); + } + } + throw new MappingException( buf.toString() ); + } + } + + /** + * Find the first possible element in the queue of extends. 
+ */ + protected org.dom4j.Document findPossibleExtends() { +// Iterator iter = extendsQueue.iterator(); + Iterator iter = extendsQueue.keySet().iterator(); + while ( iter.hasNext() ) { + final ExtendsQueueEntry entry = ( ExtendsQueueEntry ) iter.next(); + if ( getClassMapping( entry.getExplicitName() ) != null ) { + // found + iter.remove(); + return entry.getDocument(); + } + else if ( getClassMapping( HbmBinder.getClassName( entry.getExplicitName(), entry.getMappingPackage() ) ) != null ) { + // found + iter.remove(); + return entry.getDocument(); + } + } + return null; + } + + protected void secondPassCompileForeignKeys(Table table, Set done) throws MappingException { + + table.createForeignKeys(); + + Iterator iter = table.getForeignKeyIterator(); + while ( iter.hasNext() ) { + + ForeignKey fk = (ForeignKey) iter.next(); + if ( !done.contains( fk ) ) { + done.add( fk ); + final String referencedEntityName = fk.getReferencedEntityName(); + if ( referencedEntityName == null ) { + throw new MappingException( + "An association from the table " + + fk.getTable().getName() + + " does not specify the referenced entity" + ); + } + if ( log.isDebugEnabled() ) { + log.debug( "resolving reference to class: " + referencedEntityName ); + } + PersistentClass referencedClass = (PersistentClass) classes.get( referencedEntityName ); + if ( referencedClass == null ) { + throw new MappingException( + "An association from the table " + + fk.getTable().getName() + + " refers to an unmapped class: " + + referencedEntityName + ); + } + if ( referencedClass.isJoinedSubclass() ) { + secondPassCompileForeignKeys( referencedClass.getSuperclass().getTable(), done ); + } + fk.setReferencedTable( referencedClass.getTable() ); + fk.alignColumns(); + } + } + } + + /** + * Get the named queries + */ + public Map getNamedQueries() { + return namedQueries; + } + + /** + * Instantiate a new SessionFactory, using the properties and + * mappings in this configuration. The SessionFactory will be + * immutable, so changes made to the Configuration after + * building the SessionFactory will not affect it. 
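+ *
+ * A typical bootstrap, shown here only as an illustrative sketch:
+ *
+ *     SessionFactory factory = new Configuration()
+ *         .configure()            // read hibernate.cfg.xml from the classpath
+ *         .buildSessionFactory(); // immutable, thread-safe factory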
+ * + * @return a new factory for Sessions + * @see org.hibernate.SessionFactory + */ + public SessionFactory buildSessionFactory() throws HibernateException { + log.debug( "Preparing to build session factory with filters : " + filterDefinitions ); + secondPassCompile(); + validate(); + Environment.verifyProperties( properties ); + Properties copy = new Properties(); + copy.putAll( properties ); + PropertiesHelper.resolvePlaceHolders( copy ); + Settings settings = buildSettings( copy ); + + return new SessionFactoryImpl( + this, + mapping, + settings, + getInitializedEventListeners() + ); + } + + private EventListeners getInitializedEventListeners() { + EventListeners result = (EventListeners) eventListeners.shallowCopy(); + result.initializeListeners( this ); + return result; + } + + /** + * Return the configured Interceptor + */ + public Interceptor getInterceptor() { + return interceptor; + } + + /** + * Get all properties + */ + public Properties getProperties() { + return properties; + } + + /** + * Configure an Interceptor + */ + public Configuration setInterceptor(Interceptor interceptor) { + this.interceptor = interceptor; + return this; + } + + /** + * Specify a completely new set of properties + */ + public Configuration setProperties(Properties properties) { + this.properties = properties; + return this; + } + + /** + * Add the given properties + */ + public Configuration addProperties(Properties extraProperties) { + this.properties.putAll( extraProperties ); + return this; + } + + /** + * Adds the incoming properties to the internal properties structure, + * as long as the internal structure does not already contain an + * entry for the given key. + * + * @param properties + * @return this + */ + public Configuration mergeProperties(Properties properties) { + Iterator itr = properties.entrySet().iterator(); + while ( itr.hasNext() ) { + final Map.Entry entry = ( Map.Entry ) itr.next(); + if ( this.properties.containsKey( entry.getKey() ) ) { + continue; + } + this.properties.setProperty( ( String ) entry.getKey(), ( String ) entry.getValue() ); + } + return this; + } + + /** + * Set a property + */ + public Configuration setProperty(String propertyName, String value) { + properties.setProperty( propertyName, value ); + return this; + } + + /** + * Get a property + */ + public String getProperty(String propertyName) { + return properties.getProperty( propertyName ); + } + + private void addProperties(Element parent) { + Iterator iter = parent.elementIterator( "property" ); + while ( iter.hasNext() ) { + Element node = (Element) iter.next(); + String name = node.attributeValue( "name" ); + String value = node.getText().trim(); + log.debug( name + "=" + value ); + properties.setProperty( name, value ); + if ( !name.startsWith( "hibernate" ) ) { + properties.setProperty( "hibernate." + name, value ); + } + } + Environment.verifyProperties( properties ); + } + + /** + * Get the configuration file as an InputStream. Might be overridden + * by subclasses to allow the configuration to be located by some arbitrary + * mechanism. + */ + protected InputStream getConfigurationInputStream(String resource) throws HibernateException { + + log.info( "Configuration resource: " + resource ); + + return ConfigHelper.getResourceAsStream( resource ); + + } + + /** + * Use the mappings and properties specified in an application + * resource named hibernate.cfg.xml.
+ */ + public Configuration configure() throws HibernateException { + configure( "/hibernate.cfg.xml" ); + return this; + } + + /** + * Use the mappings and properties specified in the given application + * resource. The format of the resource is defined in + * hibernate-configuration-3.0.dtd. + *

+ * The resource is found via getConfigurationInputStream(resource). + */ + public Configuration configure(String resource) throws HibernateException { + log.info( "configuring from resource: " + resource ); + InputStream stream = getConfigurationInputStream( resource ); + return doConfigure( stream, resource ); + } + + /** + * Use the mappings and properties specified in the given URL. + * The format of the document is defined in + * hibernate-configuration-3.0.dtd. + * + * @param url URL from which you wish to load the configuration + * @return A configuration configured via the URL + * @throws HibernateException + */ + public Configuration configure(URL url) throws HibernateException { + log.info( "configuring from url: " + url.toString() ); + try { + return doConfigure( url.openStream(), url.toString() ); + } + catch (IOException ioe) { + throw new HibernateException( "could not configure from URL: " + url, ioe ); + } + } + + /** + * Use the mappings and properties specified in the given application + * file. The format of the file is defined in + * hibernate-configuration-3.0.dtd. + * + * @param configFile File from which you wish to load the configuration + * @return A configuration configured via the file + * @throws HibernateException + */ + public Configuration configure(File configFile) throws HibernateException { + log.info( "configuring from file: " + configFile.getName() ); + try { + return doConfigure( new FileInputStream( configFile ), configFile.toString() ); + } + catch (FileNotFoundException fnfe) { + throw new HibernateException( "could not find file: " + configFile, fnfe ); + } + } + + /** + * Use the mappings and properties specified in the given application + * resource. The format of the resource is defined in + * hibernate-configuration-3.0.dtd. + * + * @param stream InputStream to be read from + * @param resourceName The name to use in warning/error messages + * @return A configuration configured via the stream + * @throws HibernateException + */ + protected Configuration doConfigure(InputStream stream, String resourceName) throws HibernateException { + + org.dom4j.Document doc; + try { + List errors = new ArrayList(); + doc = xmlHelper.createSAXReader( resourceName, errors, entityResolver ) + .read( new InputSource( stream ) ); + if ( errors.size() != 0 ) { + throw new MappingException( + "invalid configuration", + (Throwable) errors.get( 0 ) + ); + } + } + catch (DocumentException e) { + throw new HibernateException( + "Could not parse configuration: " + resourceName, + e + ); + } + finally { + try { + stream.close(); + } + catch (IOException ioe) { + log.warn( "could not close input stream for: " + resourceName, ioe ); + } + } + + return doConfigure( doc ); + + } + + /** + * Use the mappings and properties specified in the given XML document. + * The format of the file is defined in + * hibernate-configuration-3.0.dtd. + * + * @param document an XML document from which you wish to load the configuration + * @return A configuration configured via the Document + * @throws HibernateException if there is a problem accessing the file.
+ */ + public Configuration configure(Document document) throws HibernateException { + log.info( "configuring from XML document" ); + return doConfigure( xmlHelper.createDOMReader().read( document ) ); + } + + protected Configuration doConfigure(org.dom4j.Document doc) throws HibernateException { + + Element sfNode = doc.getRootElement().element( "session-factory" ); + String name = sfNode.attributeValue( "name" ); + if ( name != null ) { + properties.setProperty( Environment.SESSION_FACTORY_NAME, name ); + } + addProperties( sfNode ); + parseSessionFactory( sfNode, name ); + + Element secNode = doc.getRootElement().element( "security" ); + if ( secNode != null ) { + parseSecurity( secNode ); + } + + log.info( "Configured SessionFactory: " + name ); + log.debug( "properties: " + properties ); + + return this; + + } + + + private void parseSessionFactory(Element sfNode, String name) { + Iterator elements = sfNode.elementIterator(); + while ( elements.hasNext() ) { + Element subelement = (Element) elements.next(); + String subelementName = subelement.getName(); + if ( "mapping".equals( subelementName ) ) { + parseMappingElement( subelement, name ); + } + else if ( "class-cache".equals( subelementName ) ) { + String className = subelement.attributeValue( "class" ); + Attribute regionNode = subelement.attribute( "region" ); + final String region = ( regionNode == null ) ? className : regionNode.getValue(); + boolean includeLazy = !"non-lazy".equals( subelement.attributeValue( "include" ) ); + setCacheConcurrencyStrategy( className, subelement.attributeValue( "usage" ), region, includeLazy ); + } + else if ( "collection-cache".equals( subelementName ) ) { + String role = subelement.attributeValue( "collection" ); + Attribute regionNode = subelement.attribute( "region" ); + final String region = ( regionNode == null ) ? 
role : regionNode.getValue(); + setCollectionCacheConcurrencyStrategy( role, subelement.attributeValue( "usage" ), region ); + } + else if ( "listener".equals( subelementName ) ) { + parseListener( subelement ); + } + else if ( "event".equals( subelementName ) ) { + parseEvent( subelement ); + } + } + } + + protected void parseMappingElement(Element subelement, String name) { + Attribute rsrc = subelement.attribute( "resource" ); + Attribute file = subelement.attribute( "file" ); + Attribute jar = subelement.attribute( "jar" ); + Attribute pkg = subelement.attribute( "package" ); + Attribute clazz = subelement.attribute( "class" ); + if ( rsrc != null ) { + log.debug( name + "<-" + rsrc ); + addResource( rsrc.getValue() ); + } + else if ( jar != null ) { + log.debug( name + "<-" + jar ); + addJar( new File( jar.getValue() ) ); + } + else if ( pkg != null ) { + throw new MappingException( + "An AnnotationConfiguration instance is required to use <mapping package=\"" + pkg.getValue() + "\"/>" + ); + } + else if ( clazz != null ) { + throw new MappingException( + "An AnnotationConfiguration instance is required to use <mapping class=\"" + clazz.getValue() + "\"/>" + ); + } + else { + if ( file == null ) { + throw new MappingException( + "<mapping> element in configuration specifies no attributes" + ); + } + log.debug( name + "<-" + file ); + addFile( file.getValue() ); + } + } + + private void parseSecurity(Element secNode) { + String contextId = secNode.attributeValue( "context" ); + setProperty(Environment.JACC_CONTEXTID, contextId); + log.info( "JACC contextID: " + contextId ); + JACCConfiguration jcfg = new JACCConfiguration( contextId ); + Iterator grantElements = secNode.elementIterator(); + while ( grantElements.hasNext() ) { + Element grantElement = (Element) grantElements.next(); + String elementName = grantElement.getName(); + if ( "grant".equals( elementName ) ) { + jcfg.addPermission( + grantElement.attributeValue( "role" ), + grantElement.attributeValue( "entity-name" ), + grantElement.attributeValue( "actions" ) + ); + } + } + } + + private void parseEvent(Element element) { + String type = element.attributeValue( "type" ); + List listeners = element.elements(); + String[] listenerClasses = new String[ listeners.size() ]; + for ( int i = 0; i < listeners.size() ; i++ ) { + listenerClasses[i] = ( (Element) listeners.get( i ) ).attributeValue( "class" ); + } + log.debug( "Event listeners: " + type + "=" + StringHelper.toString( listenerClasses ) ); + setListeners( type, listenerClasses ); + } + + private void parseListener(Element element) { + String type = element.attributeValue( "type" ); + if ( type == null ) { + throw new MappingException( "No type specified for listener" ); + } + String impl = element.attributeValue( "class" ); + log.debug( "Event listener: " + type + "=" + impl ); + setListeners( type, new String[]{impl} ); + } + + public void setListeners(String type, String[] listenerClasses) { + Object[] listeners = (Object[]) Array.newInstance( eventListeners.getListenerClassFor(type), listenerClasses.length ); + for ( int i = 0; i < listeners.length ; i++ ) { + try { + listeners[i] = ReflectHelper.classForName( listenerClasses[i] ).newInstance(); + } + catch (Exception e) { + throw new MappingException( + "Unable to instantiate specified event (" + type + ") listener class: " + listenerClasses[i], + e + ); + } + } + setListeners( type, listeners ); + } + + public void setListener(String type, Object listener) { + if ( listener == null ) { + setListeners( type, null ); + } + else { + Object[] listeners = (Object[]) Array.newInstance(
eventListeners.getListenerClassFor(type), 1 ); + listeners[0] = listener; + setListeners( type, listeners ); + } + } + + public void setListeners(String type, Object[] listeners) { + if ( "auto-flush".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setAutoFlushEventListeners( new AutoFlushEventListener[]{} ); + } + else { + eventListeners.setAutoFlushEventListeners( (AutoFlushEventListener[]) listeners ); + } + } + else if ( "merge".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setMergeEventListeners( new MergeEventListener[]{} ); + } + else { + eventListeners.setMergeEventListeners( (MergeEventListener[]) listeners ); + } + } + else if ( "create".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPersistEventListeners( new PersistEventListener[]{} ); + } + else { + eventListeners.setPersistEventListeners( (PersistEventListener[]) listeners ); + } + } + else if ( "create-onflush".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPersistOnFlushEventListeners( new PersistEventListener[]{} ); + } + else { + eventListeners.setPersistOnFlushEventListeners( (PersistEventListener[]) listeners ); + } + } + else if ( "delete".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setDeleteEventListeners( new DeleteEventListener[]{} ); + } + else { + eventListeners.setDeleteEventListeners( (DeleteEventListener[]) listeners ); + } + } + else if ( "dirty-check".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setDirtyCheckEventListeners( new DirtyCheckEventListener[]{} ); + } + else { + eventListeners.setDirtyCheckEventListeners( (DirtyCheckEventListener[]) listeners ); + } + } + else if ( "evict".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setEvictEventListeners( new EvictEventListener[]{} ); + } + else { + eventListeners.setEvictEventListeners( (EvictEventListener[]) listeners ); + } + } + else if ( "flush".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setFlushEventListeners( new FlushEventListener[]{} ); + } + else { + eventListeners.setFlushEventListeners( (FlushEventListener[]) listeners ); + } + } + else if ( "flush-entity".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setFlushEntityEventListeners( new FlushEntityEventListener[]{} ); + } + else { + eventListeners.setFlushEntityEventListeners( (FlushEntityEventListener[]) listeners ); + } + } + else if ( "load".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setLoadEventListeners( new LoadEventListener[]{} ); + } + else { + eventListeners.setLoadEventListeners( (LoadEventListener[]) listeners ); + } + } + else if ( "load-collection".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setInitializeCollectionEventListeners( + new InitializeCollectionEventListener[]{} + ); + } + else { + eventListeners.setInitializeCollectionEventListeners( + (InitializeCollectionEventListener[]) listeners + ); + } + } + else if ( "lock".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setLockEventListeners( new LockEventListener[]{} ); + } + else { + eventListeners.setLockEventListeners( (LockEventListener[]) listeners ); + } + } + else if ( "refresh".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setRefreshEventListeners( new RefreshEventListener[]{} ); + } + else { + eventListeners.setRefreshEventListeners( (RefreshEventListener[]) listeners ); + } + } + else if ( "replicate".equals( type ) ) { + if ( listeners == null ) { + 
eventListeners.setReplicateEventListeners( new ReplicateEventListener[]{} ); + } + else { + eventListeners.setReplicateEventListeners( (ReplicateEventListener[]) listeners ); + } + } + else if ( "save-update".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setSaveOrUpdateEventListeners( new SaveOrUpdateEventListener[]{} ); + } + else { + eventListeners.setSaveOrUpdateEventListeners( (SaveOrUpdateEventListener[]) listeners ); + } + } + else if ( "save".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setSaveEventListeners( new SaveOrUpdateEventListener[]{} ); + } + else { + eventListeners.setSaveEventListeners( (SaveOrUpdateEventListener[]) listeners ); + } + } + else if ( "update".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setUpdateEventListeners( new SaveOrUpdateEventListener[]{} ); + } + else { + eventListeners.setUpdateEventListeners( (SaveOrUpdateEventListener[]) listeners ); + } + } + else if ( "pre-load".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPreLoadEventListeners( new PreLoadEventListener[]{} ); + } + else { + eventListeners.setPreLoadEventListeners( (PreLoadEventListener[]) listeners ); + } + } + else if ( "pre-update".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPreUpdateEventListeners( new PreUpdateEventListener[]{} ); + } + else { + eventListeners.setPreUpdateEventListeners( (PreUpdateEventListener[]) listeners ); + } + } + else if ( "pre-delete".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPreDeleteEventListeners( new PreDeleteEventListener[]{} ); + } + else { + eventListeners.setPreDeleteEventListeners( (PreDeleteEventListener[]) listeners ); + } + } + else if ( "pre-insert".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPreInsertEventListeners( new PreInsertEventListener[]{} ); + } + else { + eventListeners.setPreInsertEventListeners( (PreInsertEventListener[]) listeners ); + } + } + else if ( "post-load".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPostLoadEventListeners( new PostLoadEventListener[]{} ); + } + else { + eventListeners.setPostLoadEventListeners( (PostLoadEventListener[]) listeners ); + } + } + else if ( "post-update".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPostUpdateEventListeners( new PostUpdateEventListener[]{} ); + } + else { + eventListeners.setPostUpdateEventListeners( (PostUpdateEventListener[]) listeners ); + } + } + else if ( "post-delete".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPostDeleteEventListeners( new PostDeleteEventListener[]{} ); + } + else { + eventListeners.setPostDeleteEventListeners( (PostDeleteEventListener[]) listeners ); + } + } + else if ( "post-insert".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPostInsertEventListeners( new PostInsertEventListener[]{} ); + } + else { + eventListeners.setPostInsertEventListeners( (PostInsertEventListener[]) listeners ); + } + } + else if ( "post-commit-update".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPostCommitUpdateEventListeners( + new PostUpdateEventListener[]{} + ); + } + else { + eventListeners.setPostCommitUpdateEventListeners( (PostUpdateEventListener[]) listeners ); + } + } + else if ( "post-commit-delete".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPostCommitDeleteEventListeners( + new PostDeleteEventListener[]{} + ); + } + else { + eventListeners.setPostCommitDeleteEventListeners( 
(PostDeleteEventListener[]) listeners ); + } + } + else if ( "post-commit-insert".equals( type ) ) { + if ( listeners == null ) { + eventListeners.setPostCommitInsertEventListeners( + new PostInsertEventListener[]{} + ); + } + else { + eventListeners.setPostCommitInsertEventListeners( (PostInsertEventListener[]) listeners ); + } + } + else { + log.warn( "Unrecognized listener type [" + type + "]" ); + } + } + + public EventListeners getEventListeners() { + return eventListeners; + } + + RootClass getRootClassMapping(String clazz) throws MappingException { + try { + return (RootClass) getClassMapping( clazz ); + } + catch (ClassCastException cce) { + throw new MappingException( "You may only specify a cache for root mappings" ); + } + } + + /** + * Set up a cache for an entity class + * + * @param clazz + * @param concurrencyStrategy + * @return Configuration + * @throws MappingException + */ + public Configuration setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy) + throws MappingException { + setCacheConcurrencyStrategy( clazz, concurrencyStrategy, clazz ); + return this; + } + + public void setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy, String region) + throws MappingException { + setCacheConcurrencyStrategy( clazz, concurrencyStrategy, region, true ); + } + + void setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy, String region, boolean includeLazy) + throws MappingException { + RootClass rootClass = getRootClassMapping( clazz ); + if ( rootClass == null ) { + throw new MappingException( "Cannot cache an unknown entity: " + clazz ); + } + rootClass.setCacheConcurrencyStrategy( concurrencyStrategy ); + rootClass.setCacheRegionName( region ); + rootClass.setLazyPropertiesCacheable( includeLazy ); + } + + /** + * Set up a cache for a collection role + * + * @param collectionRole + * @param concurrencyStrategy + * @return Configuration + * @throws MappingException + */ + public Configuration setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy) + throws MappingException { + setCollectionCacheConcurrencyStrategy( collectionRole, concurrencyStrategy, collectionRole ); + return this; + } + + public void setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy, String region) + throws MappingException { + Collection collection = getCollectionMapping( collectionRole ); + if ( collection == null ) { + throw new MappingException( "Cannot cache an unknown collection: " + collectionRole ); + } + collection.setCacheConcurrencyStrategy( concurrencyStrategy ); + collection.setCacheRegionName( region ); + } + + /** + * Get the query language imports + * + * @return a mapping from "import" names to fully qualified class names + */ + public Map getImports() { + return imports; + } + + /** + * Create an object-oriented view of the configuration properties + */ + public Settings buildSettings() throws HibernateException { + Properties clone = ( Properties ) properties.clone(); + PropertiesHelper.resolvePlaceHolders( clone ); + return settingsFactory.buildSettings( clone ); + } + + public Settings buildSettings(Properties props) throws HibernateException { + return settingsFactory.buildSettings( props ); + } + + public Map getNamedSQLQueries() { + return namedSqlQueries; + } + + public Map getSqlResultSetMappings() { + return sqlResultSetMappings; + } + + /** + * @return the NamingStrategy. 
+ */ + public NamingStrategy getNamingStrategy() { + return namingStrategy; + } + + /** + * Set a custom naming strategy + * + * @param namingStrategy the NamingStrategy to set + */ + public Configuration setNamingStrategy(NamingStrategy namingStrategy) { + this.namingStrategy = namingStrategy; + return this; + } + + public Mapping buildMapping() { + return new Mapping() { + /** + * Returns the identifier type of a mapped class + */ + public Type getIdentifierType(String persistentClass) throws MappingException { + PersistentClass pc = ( (PersistentClass) classes.get( persistentClass ) ); + if ( pc == null ) { + throw new MappingException( "persistent class not known: " + persistentClass ); + } + return pc.getIdentifier().getType(); + } + + public String getIdentifierPropertyName(String persistentClass) throws MappingException { + final PersistentClass pc = (PersistentClass) classes.get( persistentClass ); + if ( pc == null ) { + throw new MappingException( "persistent class not known: " + persistentClass ); + } + if ( !pc.hasIdentifierProperty() ) { + return null; + } + return pc.getIdentifierProperty().getName(); + } + + public Type getReferencedPropertyType(String persistentClass, String propertyName) throws MappingException { + final PersistentClass pc = (PersistentClass) classes.get( persistentClass ); + if ( pc == null ) { + throw new MappingException( "persistent class not known: " + persistentClass ); + } + Property prop = pc.getReferencedProperty( propertyName ); + if ( prop == null ) { + throw new MappingException( + "property not known: " + + persistentClass + '.' + propertyName + ); + } + return prop.getType(); + } + }; + } + + private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException { + ois.defaultReadObject(); + this.mapping = buildMapping(); + xmlHelper = new XMLHelper(); + } + + public Map getFilterDefinitions() { + return filterDefinitions; + } + + public void addFilterDefinition(FilterDefinition definition) { + filterDefinitions.put( definition.getFilterName(), definition ); + } + + public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject object) { + auxiliaryDatabaseObjects.add( object ); + } + + public Map getSqlFunctions() { + return sqlFunctions; + } + + public void addSqlFunction(String functionName, SQLFunction function) { + sqlFunctions.put( functionName, function ); + } +} diff --git a/src/org/hibernate/cfg/DefaultNamingStrategy.java b/src/org/hibernate/cfg/DefaultNamingStrategy.java new file mode 100644 index 0000000000..d9b68c43a0 --- /dev/null +++ b/src/org/hibernate/cfg/DefaultNamingStrategy.java @@ -0,0 +1,110 @@ +//$Id$ +package org.hibernate.cfg; + +import java.io.Serializable; + +import org.hibernate.util.StringHelper; +import org.hibernate.AssertionFailure; + +/** + * The default NamingStrategy + * @see ImprovedNamingStrategy a better alternative + * @author Gavin King + */ +public class DefaultNamingStrategy implements NamingStrategy, Serializable { + + /** + * The singleton instance + */ + public static final NamingStrategy INSTANCE = new DefaultNamingStrategy(); + + /** + * Return the unqualified class name + */ + public String classToTableName(String className) { + return StringHelper.unqualify(className); + } + /** + * Return the unqualified property name + */ + public String propertyToColumnName(String propertyName) { + return StringHelper.unqualify(propertyName); + } + /** + * Return the argument + */ + public String tableName(String tableName) { + return tableName; + } + /** + * Return the argument + */ + 
public String columnName(String columnName) {
+		return columnName;
+	}
+
+	/**
+	 * Return the unqualified property name; not the best strategy, but a backward-compatible one
+	 */
+	public String collectionTableName(
+			String ownerEntity, String ownerEntityTable, String associatedEntity, String associatedEntityTable,
+			String propertyName
+	) {
+		//use a degenerate strategy for backward compatibility
+		return StringHelper.unqualify(propertyName);
+	}
+
+	/**
+	 * Return the argument
+	 */
+	public String joinKeyColumnName(String joinedColumn, String joinedTable) {
+		return columnName( joinedColumn );
+	}
+
+	/**
+	 * Return the property name or propertyTableName
+	 */
+	public String foreignKeyColumnName(
+			String propertyName, String propertyEntityName, String propertyTableName, String referencedColumnName
+	) {
+		String header = propertyName != null ? StringHelper.unqualify( propertyName ) : propertyTableName;
+		if (header == null) throw new AssertionFailure("NamingStrategy not properly filled");
+		return columnName( header ); //+ "_" + referencedColumnName not used for backward compatibility
+	}
+
+	/**
+	 * Return the column name or the unqualified property name
+	 */
+	public String logicalColumnName(String columnName, String propertyName) {
+		return StringHelper.isNotEmpty( columnName ) ? columnName : StringHelper.unqualify( propertyName );
+	}
+
+	/**
+	 * Returns either the table name if explicit or,
+	 * if there is an associated table, the concatenation of owner entity table and associated table;
+	 * otherwise the concatenation of owner entity table and the unqualified property name
+	 */
+	public String logicalCollectionTableName(String tableName,
+			String ownerEntityTable, String associatedEntityTable, String propertyName
+	) {
+		if ( tableName != null ) {
+			return tableName;
+		}
+		else {
+			//use a StringBuffer to work around a JDK bug
+			return new StringBuffer(ownerEntityTable).append("_")
+					.append(
+						associatedEntityTable != null ?
+						associatedEntityTable :
+						StringHelper.unqualify( propertyName )
+					).toString();
+		}
+	}
+
+	/**
+	 * Return the column name if explicit or the concatenation of the property name and the referenced column
+	 */
+	public String logicalCollectionColumnName(String columnName, String propertyName, String referencedColumn) {
+		return StringHelper.isNotEmpty( columnName ) ? columnName : propertyName + "_" + referencedColumn;
+	}
+}
\ No newline at end of file
diff --git a/src/org/hibernate/cfg/Environment.java b/src/org/hibernate/cfg/Environment.java
new file mode 100644
index 0000000000..7b9a0e7807
--- /dev/null
+++ b/src/org/hibernate/cfg/Environment.java
@@ -0,0 +1,698 @@
+//$Id$
+package org.hibernate.cfg;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.Statement;
+import java.sql.Timestamp;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.hibernate.HibernateException;
+import org.hibernate.bytecode.BytecodeProvider;
+import org.hibernate.util.ConfigHelper;
+import org.hibernate.util.PropertiesHelper;
+
+
+/**
+ * Provides access to configuration info passed in <tt>Properties</tt> objects.
+ * <br><br>

+ * Hibernate has two property scopes:
+ * <ul>
+ * <li><b>Factory-level</b> properties may be passed to the <tt>SessionFactory</tt> when it is
+ * instantiated. Each instance might have different property values. If no
+ * properties are specified, the factory calls <tt>Environment.getProperties()</tt>.
+ * <li><b>System-level</b> properties are shared by all factory instances and are always
+ * determined by the <tt>Environment</tt> properties.
+ * </ul>
+ * The only system-level properties are:
+ * <ul>
+ * <li><tt>hibernate.jdbc.use_streams_for_binary</tt>
+ * <li><tt>hibernate.cglib.use_reflection_optimizer</tt>
+ * </ul>
+ * <tt>Environment</tt> properties are populated by calling <tt>System.getProperties()</tt>
+ * and then from a resource named <tt>/hibernate.properties</tt> if it exists. System
+ * properties override properties specified in <tt>hibernate.properties</tt>.
+ * <br><br>
+ * The <tt>SessionFactory</tt> is controlled by the following properties.
+ * Properties may be either <tt>System</tt> properties, properties
+ * defined in a resource named <tt>/hibernate.properties</tt>, or an instance of
+ * <tt>java.util.Properties</tt> passed to
+ * <tt>Configuration.buildSessionFactory()</tt>.
+ * <br><br>
+ * <table>
+ * <tr><td><b>property</b></td><td><b>meaning</b></td></tr>
+ * <tr><td><tt>hibernate.dialect</tt></td><td>classname of <tt>org.hibernate.dialect.Dialect</tt> subclass</td></tr>
+ * <tr><td><tt>hibernate.cache.provider_class</tt></td><td>classname of <tt>org.hibernate.cache.CacheProvider</tt> subclass (if not specified EHCache is used)</td></tr>
+ * <tr><td><tt>hibernate.connection.provider_class</tt></td><td>classname of <tt>org.hibernate.connection.ConnectionProvider</tt> subclass (if not specified heuristics are used)</td></tr>
+ * <tr><td><tt>hibernate.connection.username</tt></td><td>database username</td></tr>
+ * <tr><td><tt>hibernate.connection.password</tt></td><td>database password</td></tr>
+ * <tr><td><tt>hibernate.connection.url</tt></td><td>JDBC URL (when using <tt>java.sql.DriverManager</tt>)</td></tr>
+ * <tr><td><tt>hibernate.connection.driver_class</tt></td><td>classname of JDBC driver</td></tr>
+ * <tr><td><tt>hibernate.connection.isolation</tt></td><td>JDBC transaction isolation level (only when using <tt>java.sql.DriverManager</tt>)</td></tr>
+ * <tr><td><tt>hibernate.connection.pool_size</tt></td><td>the maximum size of the connection pool (only when using <tt>java.sql.DriverManager</tt>)</td></tr>
+ * <tr><td><tt>hibernate.connection.datasource</tt></td><td>datasource JNDI name (when using <tt>javax.sql.Datasource</tt>)</td></tr>
+ * <tr><td><tt>hibernate.jndi.url</tt></td><td>JNDI <tt>InitialContext</tt> URL</td></tr>
+ * <tr><td><tt>hibernate.jndi.class</tt></td><td>JNDI <tt>InitialContext</tt> classname</td></tr>
+ * <tr><td><tt>hibernate.max_fetch_depth</tt></td><td>maximum depth of outer join fetching</td></tr>
+ * <tr><td><tt>hibernate.jdbc.batch_size</tt></td><td>enable use of JDBC2 batch API for drivers which support it</td></tr>
+ * <tr><td><tt>hibernate.jdbc.fetch_size</tt></td><td>set the JDBC fetch size</td></tr>
+ * <tr><td><tt>hibernate.jdbc.use_scrollable_resultset</tt></td><td>enable use of JDBC2 scrollable resultsets (you only need to specify this property when using user-supplied connections)</td></tr>
+ * <tr><td><tt>hibernate.jdbc.use_getGeneratedKeys</tt></td><td>enable use of JDBC3 <tt>PreparedStatement.getGeneratedKeys()</tt> to retrieve natively generated keys after insert. Requires JDBC3+ driver and JRE1.4+</td></tr>
+ * <tr><td><tt>hibernate.hbm2ddl.auto</tt></td><td>enable auto DDL export</td></tr>
+ * <tr><td><tt>hibernate.default_schema</tt></td><td>use given schema name for unqualified tables (always optional)</td></tr>
+ * <tr><td><tt>hibernate.default_catalog</tt></td><td>use given catalog name for unqualified tables (always optional)</td></tr>
+ * <tr><td><tt>hibernate.session_factory_name</tt></td><td>If set, the factory attempts to bind this name to itself in the JNDI context. This name is also used to support cross-JVM <tt>Session</tt> (de)serialization.</td></tr>
+ * <tr><td><tt>hibernate.transaction.manager_lookup_class</tt></td><td>classname of <tt>org.hibernate.transaction.TransactionManagerLookup</tt> implementor</td></tr>
+ * <tr><td><tt>hibernate.transaction.factory_class</tt></td><td>the factory to use for instantiating <tt>Transaction</tt>s (defaults to <tt>JDBCTransactionFactory</tt>)</td></tr>
+ * <tr><td><tt>hibernate.query.substitutions</tt></td><td>query language token substitutions</td></tr>
+ * </table>
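+ * <br><br>
+ * As an illustrative sketch only (the dialect, driver and URL below are example
+ * values, not defaults), a factory might be configured programmatically like:
+ * <pre>
+ * Properties props = new Properties();
+ * props.setProperty( Environment.DIALECT, "org.hibernate.dialect.HSQLDialect" );
+ * props.setProperty( Environment.URL, "jdbc:hsqldb:mem:example" );
+ * props.setProperty( Environment.DRIVER, "org.hsqldb.jdbcDriver" );
+ * SessionFactory factory = new Configuration()
+ *         .setProperties( props )
+ *         .buildSessionFactory();
+ * </pre>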
    + * + * @see org.hibernate.SessionFactory + * @author Gavin King + */ +public final class Environment { + + public static final String VERSION = "3.2.4.sp1"; + + /** + * ConnectionProvider implementor to use when obtaining connections + */ + public static final String CONNECTION_PROVIDER ="hibernate.connection.provider_class"; + /** + * JDBC driver class + */ + public static final String DRIVER ="hibernate.connection.driver_class"; + /** + * JDBC transaction isolation level + */ + public static final String ISOLATION ="hibernate.connection.isolation"; + /** + * JDBC URL + */ + public static final String URL ="hibernate.connection.url"; + /** + * JDBC user + */ + public static final String USER ="hibernate.connection.username"; + /** + * JDBC password + */ + public static final String PASS ="hibernate.connection.password"; + /** + * JDBC autocommit mode + */ + public static final String AUTOCOMMIT ="hibernate.connection.autocommit"; + /** + * Maximum number of inactive connections for Hibernate's connection pool + */ + public static final String POOL_SIZE ="hibernate.connection.pool_size"; + /** + * java.sql.Datasource JNDI name + */ + public static final String DATASOURCE ="hibernate.connection.datasource"; + /** + * prefix for arbitrary JDBC connection properties + */ + public static final String CONNECTION_PREFIX = "hibernate.connection"; + + /** + * JNDI initial context class, Context.INITIAL_CONTEXT_FACTORY + */ + public static final String JNDI_CLASS ="hibernate.jndi.class"; + /** + * JNDI provider URL, Context.PROVIDER_URL + */ + public static final String JNDI_URL ="hibernate.jndi.url"; + /** + * prefix for arbitrary JNDI InitialContext properties + */ + public static final String JNDI_PREFIX = "hibernate.jndi"; + /** + * JNDI name to bind to SessionFactory + */ + public static final String SESSION_FACTORY_NAME = "hibernate.session_factory_name"; + + /** + * Hibernate SQL Dialect class + */ + public static final String DIALECT ="hibernate.dialect"; + /** + * A default database schema (owner) name to use for unqualified tablenames + */ + public static final String DEFAULT_SCHEMA = "hibernate.default_schema"; + /** + * A default database catalog name to use for unqualified tablenames + */ + public static final String DEFAULT_CATALOG = "hibernate.default_catalog"; + + /** + * Enable logging of generated SQL to the console + */ + public static final String SHOW_SQL ="hibernate.show_sql"; + /** + * Enable formatting of SQL logged to the console + */ + public static final String FORMAT_SQL ="hibernate.format_sql"; + /** + * Add comments to the generated SQL + */ + public static final String USE_SQL_COMMENTS ="hibernate.use_sql_comments"; + /** + * Maximum depth of outer join fetching + */ + public static final String MAX_FETCH_DEPTH = "hibernate.max_fetch_depth"; + /** + * The default batch size for batch fetching + */ + public static final String DEFAULT_BATCH_FETCH_SIZE = "hibernate.default_batch_fetch_size"; + /** + * Use java.io streams to read / write binary data from / to JDBC + */ + public static final String USE_STREAMS_FOR_BINARY = "hibernate.jdbc.use_streams_for_binary"; + /** + * Use JDBC scrollable ResultSets. This property is only necessary when there is + * no ConnectionProvider, ie. the user is supplying JDBC connections. + */ + public static final String USE_SCROLLABLE_RESULTSET = "hibernate.jdbc.use_scrollable_resultset"; + /** + * Tells the JDBC driver to attempt to retrieve row Id with the JDBC 3.0 PreparedStatement.getGeneratedKeys() + * method. 
In general, performance will be better if this property is set to true and the underlying + * JDBC driver supports getGeneratedKeys(). + */ + public static final String USE_GET_GENERATED_KEYS = "hibernate.jdbc.use_get_generated_keys"; + /** + * Gives the JDBC driver a hint as to the number of rows that should be fetched from the database + * when more rows are needed. If 0, JDBC driver default settings will be used. + */ + public static final String STATEMENT_FETCH_SIZE = "hibernate.jdbc.fetch_size"; + /** + * Maximum JDBC batch size. A nonzero value enables batch updates. + */ + public static final String STATEMENT_BATCH_SIZE = "hibernate.jdbc.batch_size"; + /** + * Select a custom batcher. + */ + public static final String BATCH_STRATEGY = "hibernate.jdbc.factory_class"; + /** + * Should versioned data be included in batching? + */ + public static final String BATCH_VERSIONED_DATA = "hibernate.jdbc.batch_versioned_data"; + /** + * An XSLT resource used to generate "custom" XML + */ + public static final String OUTPUT_STYLESHEET ="hibernate.xml.output_stylesheet"; + + /** + * Maximum size of C3P0 connection pool + */ + public static final String C3P0_MAX_SIZE = "hibernate.c3p0.max_size"; + /** + * Minimum size of C3P0 connection pool + */ + public static final String C3P0_MIN_SIZE = "hibernate.c3p0.min_size"; + + /** + * Maximum idle time for C3P0 connection pool + */ + public static final String C3P0_TIMEOUT = "hibernate.c3p0.timeout"; + /** + * Maximum size of C3P0 statement cache + */ + public static final String C3P0_MAX_STATEMENTS = "hibernate.c3p0.max_statements"; + /** + * Number of connections acquired when pool is exhausted + */ + public static final String C3P0_ACQUIRE_INCREMENT = "hibernate.c3p0.acquire_increment"; + /** + * Idle time before a C3P0 pooled connection is validated + */ + public static final String C3P0_IDLE_TEST_PERIOD = "hibernate.c3p0.idle_test_period"; + + /** + * Proxool/Hibernate property prefix + */ + public static final String PROXOOL_PREFIX = "hibernate.proxool"; + /** + * Proxool property to configure the Proxool Provider using an XML (/path/to/file.xml) + */ + public static final String PROXOOL_XML = "hibernate.proxool.xml"; + /** + * Proxool property to configure the Proxool Provider using a properties file (/path/to/proxool.properties) + */ + public static final String PROXOOL_PROPERTIES = "hibernate.proxool.properties"; + /** + * Proxool property to configure the Proxool Provider from an already existing pool (true / false) + */ + public static final String PROXOOL_EXISTING_POOL = "hibernate.proxool.existing_pool"; + /** + * Proxool property with the Proxool pool alias to use + * (Required for PROXOOL_EXISTING_POOL, PROXOOL_PROPERTIES, or + * PROXOOL_XML) + */ + public static final String PROXOOL_POOL_ALIAS = "hibernate.proxool.pool_alias"; + + /** + * Enable automatic session close at end of transaction + */ + public static final String AUTO_CLOSE_SESSION = "hibernate.transaction.auto_close_session"; + /** + * Enable automatic flush during the JTA beforeCompletion() callback + */ + public static final String FLUSH_BEFORE_COMPLETION = "hibernate.transaction.flush_before_completion"; + /** + * Specifies how Hibernate should release JDBC connections. + */ + public static final String RELEASE_CONNECTIONS = "hibernate.connection.release_mode"; + /** + * Context scoping impl for {@link org.hibernate.SessionFactory#getCurrentSession()} processing. 
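+	 * <p/>
+	 * Recognized values include the short names <tt>jta</tt>, <tt>thread</tt> and
+	 * <tt>managed</tt>, as well as the fully qualified name of a custom
+	 * {@link org.hibernate.context.CurrentSessionContext} implementation.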
+	 */
+	public static final String CURRENT_SESSION_CONTEXT_CLASS = "hibernate.current_session_context_class";
+
+	/**
+	 * TransactionFactory implementor to use for creating Transactions
+	 */
+	public static final String TRANSACTION_STRATEGY = "hibernate.transaction.factory_class";
+	/**
+	 * TransactionManagerLookup implementor to use for obtaining the TransactionManager
+	 */
+	public static final String TRANSACTION_MANAGER_STRATEGY = "hibernate.transaction.manager_lookup_class";
+	/**
+	 * JNDI name of JTA UserTransaction object
+	 */
+	public static final String USER_TRANSACTION = "jta.UserTransaction";
+
+	/**
+	 * The CacheProvider implementation class
+	 */
+	public static final String CACHE_PROVIDER = "hibernate.cache.provider_class";
+	/**
+	 * The resource path to the CacheProvider configuration file
+	 */
+	public static final String CACHE_PROVIDER_CONFIG = "hibernate.cache.provider_configuration_file_resource_path";
+	/**
+	 * The CacheProvider JNDI namespace, if pre-bound to JNDI.
+	 */
+	public static final String CACHE_NAMESPACE = "hibernate.cache.jndi";
+	/**
+	 * Enable the query cache (disabled by default)
+	 */
+	public static final String USE_QUERY_CACHE = "hibernate.cache.use_query_cache";
+	/**
+	 * The QueryCacheFactory implementation class.
+	 */
+	public static final String QUERY_CACHE_FACTORY = "hibernate.cache.query_cache_factory";
+	/**
+	 * Enable the second-level cache (enabled by default)
+	 */
+	public static final String USE_SECOND_LEVEL_CACHE = "hibernate.cache.use_second_level_cache";
+	/**
+	 * Optimize the cache for minimal puts instead of minimal gets
+	 */
+	public static final String USE_MINIMAL_PUTS = "hibernate.cache.use_minimal_puts";
+	/**
+	 * The CacheProvider region name prefix
+	 */
+	public static final String CACHE_REGION_PREFIX = "hibernate.cache.region_prefix";
+	/**
+	 * Enable use of structured second-level cache entries
+	 */
+	public static final String USE_STRUCTURED_CACHE = "hibernate.cache.use_structured_entries";
+
+	/**
+	 * Enable statistics collection
+	 */
+	public static final String GENERATE_STATISTICS = "hibernate.generate_statistics";
+
+	public static final String USE_IDENTIFIER_ROLLBACK = "hibernate.use_identifier_rollback";
+
+	/**
+	 * Use bytecode libraries for optimized property access
+	 */
+	public static final String USE_REFLECTION_OPTIMIZER = "hibernate.bytecode.use_reflection_optimizer";
+
+	/**
+	 * The classname of the HQL query parser factory
+	 */
+	public static final String QUERY_TRANSLATOR = "hibernate.query.factory_class";
+
+	/**
+	 * A comma-separated list of token substitutions to use when translating a Hibernate
+	 * query to SQL
+	 */
+	public static final String QUERY_SUBSTITUTIONS = "hibernate.query.substitutions";
+
+	/**
+	 * Should named queries be checked during startup (the default is enabled).
+	 * <p/>
+	 * Mainly intended for test environments.
+	 */
+	public static final String QUERY_STARTUP_CHECKING = "hibernate.query.startup_check";
+
+	/**
+	 * Auto export/update schema using hbm2ddl tool. Valid values are <tt>update</tt>,
+	 * <tt>create</tt>, <tt>create-drop</tt> and <tt>validate</tt>.
+	 */
+	public static final String HBM2DDL_AUTO = "hibernate.hbm2ddl.auto";
+
+	/**
+	 * The {@link org.hibernate.exception.SQLExceptionConverter} to use for converting SQLExceptions
+	 * to Hibernate's JDBCException hierarchy.  The default is to use the configured
+	 * {@link org.hibernate.dialect.Dialect}'s preferred SQLExceptionConverter.
+	 */
+	public static final String SQL_EXCEPTION_CONVERTER = "hibernate.jdbc.sql_exception_converter";
+
+	/**
+	 * Enable wrapping of JDBC result sets in order to speed up column name lookups for
+	 * broken JDBC drivers
+	 */
+	public static final String WRAP_RESULT_SETS = "hibernate.jdbc.wrap_result_sets";
+
+	/**
+	 * Enable ordering of update statements by primary key value
+	 */
+	public static final String ORDER_UPDATES = "hibernate.order_updates";
+
+	/**
+	 * Enable ordering of insert statements for the purpose of more efficient JDBC batching.
+	 */
+	public static final String ORDER_INSERTS = "hibernate.order_inserts";
+
+	/**
+	 * The default EntityMode for Sessions opened from this SessionFactory.
+	 */
+	public static final String DEFAULT_ENTITY_MODE = "hibernate.default_entity_mode";
+
+	/**
+	 * The JACC context id of the deployment
+	 */
+	public static final String JACC_CONTEXTID = "hibernate.jacc_context_id";
+
+	public static final String BYTECODE_PROVIDER = "hibernate.bytecode.provider";
+
+	public static final String JPAQL_STRICT_COMPLIANCE = "hibernate.query.jpaql_strict_compliance";
+
+	private static final BytecodeProvider BYTECODE_PROVIDER_INSTANCE;
+	private static final boolean ENABLE_BINARY_STREAMS;
+	private static final boolean ENABLE_REFLECTION_OPTIMIZER;
+	private static final boolean JVM_SUPPORTS_LINKED_HASH_COLLECTIONS;
+	private static final boolean JVM_HAS_TIMESTAMP_BUG;
+	private static final boolean JVM_HAS_JDK14_TIMESTAMP;
+	private static final boolean JVM_SUPPORTS_GET_GENERATED_KEYS;
+
+	private static final Properties GLOBAL_PROPERTIES;
+	private static final HashMap ISOLATION_LEVELS = new HashMap();
+	private static final Map OBSOLETE_PROPERTIES = new HashMap();
+	private static final Map RENAMED_PROPERTIES = new HashMap();
+
+	private static final Log log = LogFactory.getLog(Environment.class);
+
+	/**
+	 * Issues warnings to the user when any obsolete property names are used.
+	 */
+	public static void verifyProperties(Properties props) {
+		Iterator iter = props.keySet().iterator();
+		Map propertiesToAdd = new HashMap();
+		while ( iter.hasNext() ) {
+			final Object propertyName = iter.next();
+			Object newPropertyName = OBSOLETE_PROPERTIES.get( propertyName );
+			if ( newPropertyName != null ) {
+				log.warn( "Usage of obsolete property: " + propertyName + " no longer supported, use: " + newPropertyName );
+			}
+			newPropertyName = RENAMED_PROPERTIES.get( propertyName );
+			if ( newPropertyName != null ) {
+				log.warn( "Property [" + propertyName + "] has been renamed to [" + newPropertyName + "]; update your properties appropriately" );
+				if ( !props.containsKey( newPropertyName ) ) {
+					propertiesToAdd.put( newPropertyName, props.get( propertyName ) );
+				}
+			}
+		}
+		props.putAll(propertiesToAdd);
+	}
+
+	static {
+
+		log.info("Hibernate " + VERSION);
+
+		RENAMED_PROPERTIES.put( "hibernate.cglib.use_reflection_optimizer", USE_REFLECTION_OPTIMIZER );
+
+		ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_NONE), "NONE" );
+		ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_READ_UNCOMMITTED), "READ_UNCOMMITTED" );
+		ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_READ_COMMITTED), "READ_COMMITTED" );
+		ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_REPEATABLE_READ), "REPEATABLE_READ" );
+		ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_SERIALIZABLE), "SERIALIZABLE" );
+
+		GLOBAL_PROPERTIES = new Properties();
+		//Set USE_REFLECTION_OPTIMIZER to false to fix HHH-227
+		GLOBAL_PROPERTIES.setProperty( USE_REFLECTION_OPTIMIZER, Boolean.FALSE.toString() );
+
+		try {
+			InputStream stream = ConfigHelper.getResourceAsStream("/hibernate.properties");
+			try {
+				GLOBAL_PROPERTIES.load(stream);
+				log.info( "loaded properties from resource hibernate.properties: " + PropertiesHelper.maskOut(GLOBAL_PROPERTIES, PASS) );
+			}
+			catch (Exception e) {
+				log.error("problem loading properties from hibernate.properties");
+			}
+			finally {
+				try{
+					stream.close();
+				}
+				catch (IOException ioe){
+					log.error("could not close stream on hibernate.properties", ioe);
+				}
+			}
+		}
+		catch (HibernateException he) {
+			log.info("hibernate.properties not found");
+		}
+
+		try {
+			GLOBAL_PROPERTIES.putAll( System.getProperties() );
+		}
+		catch (SecurityException se) {
+			log.warn("could not copy system properties, system properties will be ignored");
+		}
+
+		verifyProperties(GLOBAL_PROPERTIES);
+
+		ENABLE_BINARY_STREAMS = PropertiesHelper.getBoolean(USE_STREAMS_FOR_BINARY, GLOBAL_PROPERTIES);
+		ENABLE_REFLECTION_OPTIMIZER = PropertiesHelper.getBoolean(USE_REFLECTION_OPTIMIZER, GLOBAL_PROPERTIES);
+
+		if (ENABLE_BINARY_STREAMS) {
+			log.info("using java.io streams to persist binary types");
+		}
+		if (ENABLE_REFLECTION_OPTIMIZER) {
+			log.info("using bytecode reflection optimizer");
+		}
+		BYTECODE_PROVIDER_INSTANCE = buildBytecodeProvider( GLOBAL_PROPERTIES );
+
+		boolean getGeneratedKeysSupport;
+		try {
+			Statement.class.getMethod("getGeneratedKeys", null);
+			getGeneratedKeysSupport = true;
+		}
+		catch (NoSuchMethodException nsme) {
+			getGeneratedKeysSupport = false;
+		}
+		JVM_SUPPORTS_GET_GENERATED_KEYS = getGeneratedKeysSupport;
+		if (!JVM_SUPPORTS_GET_GENERATED_KEYS) log.info("JVM does not support Statement.getGeneratedKeys()");
+
+		boolean linkedHashSupport;
+		try {
+			Class.forName("java.util.LinkedHashSet");
+			linkedHashSupport = true;
+		}
+		catch (ClassNotFoundException cnfe) {
+			linkedHashSupport = false;
+		}
+		JVM_SUPPORTS_LINKED_HASH_COLLECTIONS = linkedHashSupport;
+		if (!JVM_SUPPORTS_LINKED_HASH_COLLECTIONS) log.info("JVM does not support LinkedHashMap, LinkedHashSet - ordered maps and sets disabled");
+
+		JVM_HAS_TIMESTAMP_BUG = new Timestamp(123456789).getTime() != 123456789;
+		if (JVM_HAS_TIMESTAMP_BUG) log.info("using workaround for JVM bug in java.sql.Timestamp");
+		Timestamp t = new Timestamp(0);
+		t.setNanos(5 * 1000000);
+		JVM_HAS_JDK14_TIMESTAMP = t.getTime() == 5;
+		if (JVM_HAS_JDK14_TIMESTAMP) {
+			log.info("using JDK 1.4 java.sql.Timestamp handling");
+		}
+		else {
+			log.info("using pre JDK 1.4 java.sql.Timestamp handling");
+		}
+	}
+
+	public static BytecodeProvider getBytecodeProvider() {
+		return BYTECODE_PROVIDER_INSTANCE;
+	}
+
+	/**
+	 * Does this JVM have the IBM JDK 1.3.1 Timestamp bug, where
+	 * <tt>new Timestamp(x).getTime() != x</tt>?
+	 */
+	public static boolean jvmHasTimestampBug() {
+		return JVM_HAS_TIMESTAMP_BUG;
+	}
+
+	/**
+	 * Does this JVM handle Timestamp in the JDK 1.4 compliant way?
+	 */
+	public static boolean jvmHasJDK14Timestamp() {
+		return JVM_HAS_JDK14_TIMESTAMP;
+	}
+
+	/**
+	 * Does this JVM support LinkedHashSet and LinkedHashMap?
+	 * @see java.util.LinkedHashSet
+	 * @see java.util.LinkedHashMap
+	 */
+	public static boolean jvmSupportsLinkedHashCollections() {
+		return JVM_SUPPORTS_LINKED_HASH_COLLECTIONS;
+	}
+
+	public static boolean jvmSupportsGetGeneratedKeys() {
+		return JVM_SUPPORTS_GET_GENERATED_KEYS;
+	}
+
+	/**
+	 * Should we use streams to bind binary types to JDBC IN parameters?
+	 * Property <tt>hibernate.jdbc.use_streams_for_binary</tt>.
+	 * @see Environment#USE_STREAMS_FOR_BINARY
+	 */
+	public static boolean useStreamsForBinary() {
+		return ENABLE_BINARY_STREAMS;
+	}
+
+	/**
+	 * Should we use the bytecode reflection optimizer?
+	 * Property <tt>hibernate.bytecode.use_reflection_optimizer</tt>.
+	 * @see Environment#USE_REFLECTION_OPTIMIZER
+	 */
+	public static boolean useReflectionOptimizer() {
+		return ENABLE_REFLECTION_OPTIMIZER;
+	}
+
+	private Environment() { throw new UnsupportedOperationException(); }
+
+	/**
+	 * Return System properties, extended by any properties specified
+	 * in hibernate.properties.
+	 * @return Properties
+	 */
+	public static Properties getProperties() {
+		Properties copy = new Properties();
+		copy.putAll(GLOBAL_PROPERTIES);
+		return copy;
+	}
+
+	/**
+	 * Get the name of a JDBC transaction isolation level
+	 *
+	 * @see java.sql.Connection
+	 * @param isolation as defined by java.sql.Connection
+	 * @return a human-readable name
+	 */
+	public static String isolationLevelToString(int isolation) {
+		return (String) ISOLATION_LEVELS.get( new Integer(isolation) );
+	}
+
+
+	public static BytecodeProvider buildBytecodeProvider(Properties properties) {
+		String provider = PropertiesHelper.getString( Environment.BYTECODE_PROVIDER, properties, "cglib" );
+		log.info( "Bytecode provider name : " + provider );
+		return buildBytecodeProvider( provider );
+	}
+
+	private static BytecodeProvider buildBytecodeProvider(String providerName) {
+		if ( "javassist".equals( providerName ) ) {
+			return new org.hibernate.bytecode.javassist.BytecodeProviderImpl();
+		}
+		else if ( "cglib".equals( providerName ) ) {
+			return new org.hibernate.bytecode.cglib.BytecodeProviderImpl();
+		}
+		else {
+			log.warn( "unrecognized bytecode provider [" + providerName + "], using cglib by default" );
+			return new org.hibernate.bytecode.cglib.BytecodeProviderImpl();
+		}
+	}
+
+}
diff --git a/src/org/hibernate/cfg/ExtendsQueueEntry.java b/src/org/hibernate/cfg/ExtendsQueueEntry.java
new file mode 100644
index 0000000000..2d058e0b2c
--- /dev/null
+++ b/src/org/hibernate/cfg/ExtendsQueueEntry.java
@@ -0,0 +1,33 @@
+package org.hibernate.cfg;
+
+import org.dom4j.Document;
+
+/**
+ * Represents a mapping queued for delayed processing to await
+ * processing of an extends entity upon which it depends.
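+ * <p/>
+ * For example, a <subclass extends="..."/> mapping document processed before the
+ * mapping of the entity it extends becomes available is queued here and bound
+ * again once that entity has been processed.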
+ * + * @author Steve Ebersole + */ +public class ExtendsQueueEntry { + private final String explicitName; + private final String mappingPackage; + private final Document document; + + public ExtendsQueueEntry(String explicitName, String mappingPackage, Document document) { + this.explicitName = explicitName; + this.mappingPackage = mappingPackage; + this.document = document; + } + + public String getExplicitName() { + return explicitName; + } + + public String getMappingPackage() { + return mappingPackage; + } + + public Document getDocument() { + return document; + } +} diff --git a/src/org/hibernate/cfg/HbmBinder.java b/src/org/hibernate/cfg/HbmBinder.java new file mode 100644 index 0000000000..6ce93a63ea --- /dev/null +++ b/src/org/hibernate/cfg/HbmBinder.java @@ -0,0 +1,3063 @@ +// $Id$ +package org.hibernate.cfg; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Properties; +import java.util.StringTokenizer; + +import org.apache.commons.collections.SequencedHashMap; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.dom4j.Attribute; +import org.dom4j.Document; +import org.dom4j.Element; +import org.hibernate.CacheMode; +import org.hibernate.EntityMode; +import org.hibernate.FetchMode; +import org.hibernate.FlushMode; +import org.hibernate.MappingException; +import org.hibernate.engine.FilterDefinition; +import org.hibernate.engine.NamedQueryDefinition; +import org.hibernate.engine.Versioning; +import org.hibernate.engine.ExecuteUpdateResultCheckStyle; +import org.hibernate.id.PersistentIdentifierGenerator; +import org.hibernate.mapping.Any; +import org.hibernate.mapping.Array; +import org.hibernate.mapping.AuxiliaryDatabaseObject; +import org.hibernate.mapping.Backref; +import org.hibernate.mapping.Bag; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.Column; +import org.hibernate.mapping.Component; +import org.hibernate.mapping.DependantValue; +import org.hibernate.mapping.Fetchable; +import org.hibernate.mapping.Filterable; +import org.hibernate.mapping.Formula; +import org.hibernate.mapping.IdentifierBag; +import org.hibernate.mapping.IdentifierCollection; +import org.hibernate.mapping.IndexBackref; +import org.hibernate.mapping.IndexedCollection; +import org.hibernate.mapping.Join; +import org.hibernate.mapping.JoinedSubclass; +import org.hibernate.mapping.KeyValue; +import org.hibernate.mapping.List; +import org.hibernate.mapping.ManyToOne; +import org.hibernate.mapping.Map; +import org.hibernate.mapping.MetaAttribute; +import org.hibernate.mapping.OneToMany; +import org.hibernate.mapping.OneToOne; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.mapping.PrimitiveArray; +import org.hibernate.mapping.Property; +import org.hibernate.mapping.PropertyGeneration; +import org.hibernate.mapping.RootClass; +import org.hibernate.mapping.Selectable; +import org.hibernate.mapping.Set; +import org.hibernate.mapping.SimpleAuxiliaryDatabaseObject; +import org.hibernate.mapping.SimpleValue; +import org.hibernate.mapping.SingleTableSubclass; +import org.hibernate.mapping.Subclass; +import org.hibernate.mapping.Table; +import org.hibernate.mapping.ToOne; +import org.hibernate.mapping.TypeDef; +import org.hibernate.mapping.UnionSubclass; +import org.hibernate.mapping.UniqueKey; +import org.hibernate.mapping.Value; +import org.hibernate.persister.entity.JoinedSubclassEntityPersister; +import 
org.hibernate.persister.entity.SingleTableEntityPersister; +import org.hibernate.persister.entity.UnionSubclassEntityPersister; +import org.hibernate.type.DiscriminatorType; +import org.hibernate.type.ForeignKeyDirection; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.JoinedIterator; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +/** + * Walks an XML mapping document and produces the Hibernate configuration-time metamodel (the + * classes in the mapping package) + * + * @author Gavin King + */ +public final class HbmBinder { + + private static final Log log = LogFactory.getLog( HbmBinder.class ); + + /** + * Private constructor to disallow instantiation. + */ + private HbmBinder() { + } + + /** + * The main contract into the hbm.xml-based binder. Performs necessary binding operations + * represented by the given DOM. + * + * @param doc The DOM to be parsed and bound. + * @param mappings Current bind state. + * @param inheritedMetas Any inherited meta-tag information. + * @throws MappingException + */ + public static void bindRoot(Document doc, Mappings mappings, java.util.Map inheritedMetas) + throws MappingException { + + java.util.List names = HbmBinder.getExtendsNeeded( doc, mappings ); + if ( !names.isEmpty() ) { + // classes mentioned in extends not available - so put it in queue + Element hmNode = doc.getRootElement(); + Attribute packNode = hmNode.attribute( "package" ); + String packageName = null; + if ( packNode != null ) { + packageName = packNode.getValue(); + } + Iterator itr = names.iterator(); + while ( itr.hasNext() ) { + String extendsName = (String) itr.next(); + mappings.addToExtendsQueue( new ExtendsQueueEntry( extendsName, packageName, doc ) ); + } + return; + } + + Element hmNode = doc.getRootElement(); + // get meta's from + inheritedMetas = getMetas( hmNode, inheritedMetas, true ); + extractRootAttributes( hmNode, mappings ); + + Iterator rootChildren = hmNode.elementIterator(); + while ( rootChildren.hasNext() ) { + final Element element = (Element) rootChildren.next(); + final String elementName = element.getName(); + + if ( "filter-def".equals( elementName ) ) { + parseFilterDef( element, mappings ); + } + else if ( "typedef".equals( elementName ) ) { + bindTypeDef( element, mappings ); + } + else if ( "class".equals( elementName ) ) { + RootClass rootclass = new RootClass(); + bindRootClass( element, rootclass, mappings, inheritedMetas ); + mappings.addClass( rootclass ); + } + else if ( "subclass".equals( elementName ) ) { + PersistentClass superModel = getSuperclass( mappings, element ); + handleSubclass( superModel, mappings, element, inheritedMetas ); + } + else if ( "joined-subclass".equals( elementName ) ) { + PersistentClass superModel = getSuperclass( mappings, element ); + handleJoinedSubclass( superModel, mappings, element, inheritedMetas ); + } + else if ( "union-subclass".equals( elementName ) ) { + PersistentClass superModel = getSuperclass( mappings, element ); + handleUnionSubclass( superModel, mappings, element, inheritedMetas ); + } + else if ( "query".equals( elementName ) ) { + bindNamedQuery( element, null, mappings ); + } + else if ( "sql-query".equals( elementName ) ) { + bindNamedSQLQuery( element, null, mappings ); + } + else if ( "resultset".equals( elementName ) ) { + bindResultSetMappingDefinition( element, null, mappings ); + } + else if ( "import".equals( elementName ) ) { + bindImport( element, mappings ); + } + else if ( 
"database-object".equals( elementName ) ) { + bindAuxiliaryDatabaseObject( element, mappings ); + } + } + } + + private static void bindImport(Element importNode, Mappings mappings) { + String className = getClassName( importNode.attribute( "class" ), mappings ); + Attribute renameNode = importNode.attribute( "rename" ); + String rename = ( renameNode == null ) ? + StringHelper.unqualify( className ) : + renameNode.getValue(); + log.debug( "Import: " + rename + " -> " + className ); + mappings.addImport( className, rename ); + } + + private static void bindTypeDef(Element typedefNode, Mappings mappings) { + String typeClass = typedefNode.attributeValue( "class" ); + String typeName = typedefNode.attributeValue( "name" ); + Iterator paramIter = typedefNode.elementIterator( "param" ); + Properties parameters = new Properties(); + while ( paramIter.hasNext() ) { + Element param = (Element) paramIter.next(); + parameters.setProperty( param.attributeValue( "name" ), param.getTextTrim() ); + } + mappings.addTypeDef( typeName, typeClass, parameters ); + } + + private static void bindAuxiliaryDatabaseObject(Element auxDbObjectNode, Mappings mappings) { + AuxiliaryDatabaseObject auxDbObject = null; + Element definitionNode = auxDbObjectNode.element( "definition" ); + if ( definitionNode != null ) { + try { + auxDbObject = ( AuxiliaryDatabaseObject ) ReflectHelper + .classForName( definitionNode.attributeValue( "class" ) ) + .newInstance(); + } + catch( ClassNotFoundException e ) { + throw new MappingException( + "could not locate custom database object class [" + + definitionNode.attributeValue( "class" ) + "]" + ); + } + catch( Throwable t ) { + throw new MappingException( + "could not instantiate custom database object class [" + + definitionNode.attributeValue( "class" ) + "]" + ); + } + } + else { + auxDbObject = new SimpleAuxiliaryDatabaseObject( + auxDbObjectNode.elementTextTrim( "create" ), + auxDbObjectNode.elementTextTrim( "drop" ) + ); + } + + Iterator dialectScopings = auxDbObjectNode.elementIterator( "dialect-scope" ); + while ( dialectScopings.hasNext() ) { + Element dialectScoping = ( Element ) dialectScopings.next(); + auxDbObject.addDialectScope( dialectScoping.attributeValue( "name" ) ); + } + + mappings.addAuxiliaryDatabaseObject( auxDbObject ); + } + + private static void extractRootAttributes(Element hmNode, Mappings mappings) { + Attribute schemaNode = hmNode.attribute( "schema" ); + mappings.setSchemaName( ( schemaNode == null ) ? null : schemaNode.getValue() ); + + Attribute catalogNode = hmNode.attribute( "catalog" ); + mappings.setCatalogName( ( catalogNode == null ) ? null : catalogNode.getValue() ); + + Attribute dcNode = hmNode.attribute( "default-cascade" ); + mappings.setDefaultCascade( ( dcNode == null ) ? "none" : dcNode.getValue() ); + + Attribute daNode = hmNode.attribute( "default-access" ); + mappings.setDefaultAccess( ( daNode == null ) ? "property" : daNode.getValue() ); + + Attribute dlNode = hmNode.attribute( "default-lazy" ); + mappings.setDefaultLazy( dlNode == null || dlNode.getValue().equals( "true" ) ); + + Attribute aiNode = hmNode.attribute( "auto-import" ); + mappings.setAutoImport( ( aiNode == null ) || "true".equals( aiNode.getValue() ) ); + + Attribute packNode = hmNode.attribute( "package" ); + if ( packNode != null ) mappings.setDefaultPackage( packNode.getValue() ); + } + + /** + * Responsible for perfoming the bind operation related to an <class/> mapping element. + * + * @param node The DOM Element for the <class/> element. 
+ * @param rootClass The mapping instance to which to bind the information. + * @param mappings The current bind state. + * @param inheritedMetas Any inherited meta-tag information. + * @throws MappingException + */ + public static void bindRootClass(Element node, RootClass rootClass, Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + bindClass( node, rootClass, mappings, inheritedMetas ); + inheritedMetas = getMetas( node, inheritedMetas, true ); // get meta's from + bindRootPersistentClassCommonValues( node, inheritedMetas, mappings, rootClass ); + } + + private static void bindRootPersistentClassCommonValues(Element node, + java.util.Map inheritedMetas, Mappings mappings, RootClass entity) + throws MappingException { + + // DB-OBJECTNAME + + Attribute schemaNode = node.attribute( "schema" ); + String schema = schemaNode == null ? + mappings.getSchemaName() : schemaNode.getValue(); + + Attribute catalogNode = node.attribute( "catalog" ); + String catalog = catalogNode == null ? + mappings.getCatalogName() : catalogNode.getValue(); + + Table table = mappings.addTable( + schema, + catalog, + getClassTableName( entity, node, schema, catalog, null, mappings ), + getSubselect( node ), + entity.isAbstract() != null && entity.isAbstract().booleanValue() + ); + entity.setTable( table ); + bindComment(table, node); + + log.info( + "Mapping class: " + entity.getEntityName() + + " -> " + entity.getTable().getName() + ); + + // MUTABLE + Attribute mutableNode = node.attribute( "mutable" ); + entity.setMutable( ( mutableNode == null ) || mutableNode.getValue().equals( "true" ) ); + + // WHERE + Attribute whereNode = node.attribute( "where" ); + if ( whereNode != null ) entity.setWhere( whereNode.getValue() ); + + // CHECK + Attribute chNode = node.attribute( "check" ); + if ( chNode != null ) table.addCheckConstraint( chNode.getValue() ); + + // POLYMORPHISM + Attribute polyNode = node.attribute( "polymorphism" ); + entity.setExplicitPolymorphism( ( polyNode != null ) + && polyNode.getValue().equals( "explicit" ) ); + + // ROW ID + Attribute rowidNode = node.attribute( "rowid" ); + if ( rowidNode != null ) table.setRowId( rowidNode.getValue() ); + + Iterator subnodes = node.elementIterator(); + while ( subnodes.hasNext() ) { + + Element subnode = (Element) subnodes.next(); + String name = subnode.getName(); + + if ( "id".equals( name ) ) { + // ID + bindSimpleId( subnode, entity, mappings, inheritedMetas ); + } + else if ( "composite-id".equals( name ) ) { + // COMPOSITE-ID + bindCompositeId( subnode, entity, mappings, inheritedMetas ); + } + else if ( "version".equals( name ) || "timestamp".equals( name ) ) { + // VERSION / TIMESTAMP + bindVersioningProperty( table, subnode, mappings, name, entity, inheritedMetas ); + } + else if ( "discriminator".equals( name ) ) { + // DISCRIMINATOR + bindDiscriminatorProperty( table, entity, subnode, mappings ); + } + else if ( "cache".equals( name ) ) { + entity.setCacheConcurrencyStrategy( subnode.attributeValue( "usage" ) ); + entity.setCacheRegionName( subnode.attributeValue( "region" ) ); + entity.setLazyPropertiesCacheable( !"non-lazy".equals( subnode.attributeValue( "include" ) ) ); + } + + } + + // Primary key constraint + entity.createPrimaryKey(); + + createClassProperties( node, entity, mappings, inheritedMetas ); + } + + private static void bindSimpleId(Element idNode, RootClass entity, Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + String propertyName = idNode.attributeValue( "name" ); + + 
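// an <id> element need not name a property: when the "name" attribute is absent,
+		// the identifier is mapped to a column only, with no identifier property
+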
SimpleValue id = new SimpleValue( entity.getTable() ); + entity.setIdentifier( id ); + + // if ( propertyName == null || entity.getPojoRepresentation() == null ) { + // bindSimpleValue( idNode, id, false, RootClass.DEFAULT_IDENTIFIER_COLUMN_NAME, mappings ); + // if ( !id.isTypeSpecified() ) { + // throw new MappingException( "must specify an identifier type: " + entity.getEntityName() + // ); + // } + // } + // else { + // bindSimpleValue( idNode, id, false, propertyName, mappings ); + // PojoRepresentation pojo = entity.getPojoRepresentation(); + // id.setTypeUsingReflection( pojo.getClassName(), propertyName ); + // + // Property prop = new Property(); + // prop.setValue( id ); + // bindProperty( idNode, prop, mappings, inheritedMetas ); + // entity.setIdentifierProperty( prop ); + // } + + if ( propertyName == null ) { + bindSimpleValue( idNode, id, false, RootClass.DEFAULT_IDENTIFIER_COLUMN_NAME, mappings ); + } + else { + bindSimpleValue( idNode, id, false, propertyName, mappings ); + } + + if ( propertyName == null || !entity.hasPojoRepresentation() ) { + if ( !id.isTypeSpecified() ) { + throw new MappingException( "must specify an identifier type: " + + entity.getEntityName() ); + } + } + else { + id.setTypeUsingReflection( entity.getClassName(), propertyName ); + } + + if ( propertyName != null ) { + Property prop = new Property(); + prop.setValue( id ); + bindProperty( idNode, prop, mappings, inheritedMetas ); + entity.setIdentifierProperty( prop ); + } + + // TODO: + /* + * if ( id.getHibernateType().getReturnedClass().isArray() ) throw new MappingException( + * "illegal use of an array as an identifier (arrays don't reimplement equals)" ); + */ + makeIdentifier( idNode, id, mappings ); + } + + private static void bindCompositeId(Element idNode, RootClass entity, Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + String propertyName = idNode.attributeValue( "name" ); + Component id = new Component( entity ); + entity.setIdentifier( id ); + bindCompositeId( idNode, id, entity, propertyName, mappings, inheritedMetas ); + if ( propertyName == null ) { + entity.setEmbeddedIdentifier( id.isEmbedded() ); + if ( id.isEmbedded() ) { + // todo : what is the implication of this? 
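+				// (likely: with no POJO representation the embedded composite id
+				// cannot be accessed reflectively, so it is treated as a dynamic
+				// component, e.g. a Map; hence setDynamic below)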
+				id.setDynamic( !entity.hasPojoRepresentation() );
+				/*
+				 * Property prop = new Property(); prop.setName("id");
+				 * prop.setPropertyAccessorName("embedded"); prop.setValue(id);
+				 * entity.setIdentifierProperty(prop);
+				 */
+			}
+		}
+		else {
+			Property prop = new Property();
+			prop.setValue( id );
+			bindProperty( idNode, prop, mappings, inheritedMetas );
+			entity.setIdentifierProperty( prop );
+		}
+
+		makeIdentifier( idNode, id, mappings );
+
+	}
+
+	private static void bindVersioningProperty(Table table, Element subnode, Mappings mappings,
+			String name, RootClass entity, java.util.Map inheritedMetas) {
+
+		String propertyName = subnode.attributeValue( "name" );
+		SimpleValue val = new SimpleValue( table );
+		bindSimpleValue( subnode, val, false, propertyName, mappings );
+		if ( !val.isTypeSpecified() ) {
+			// this is either a <version/> tag with no type attribute,
+			// or a <timestamp/> tag
+			if ( "version".equals( name ) ) {
+				val.setTypeName( "integer" );
+			}
+			else {
+				if ( "db".equals( subnode.attributeValue( "source" ) ) ) {
+					val.setTypeName( "dbtimestamp" );
+				}
+				else {
+					val.setTypeName( "timestamp" );
+				}
+			}
+		}
+		Property prop = new Property();
+		prop.setValue( val );
+		bindProperty( subnode, prop, mappings, inheritedMetas );
+		// for version properties marked as being generated, make sure they are "always"
+		// generated; aka, "insert" is invalid; this is disallowed by the DTD,
+		// but just to make sure...
+		if ( prop.getGeneration() == PropertyGeneration.INSERT ) {
+			throw new MappingException( "'generated' attribute cannot be 'insert' for versioning property" );
+		}
+		makeVersion( subnode, val );
+		entity.setVersion( prop );
+		entity.addProperty( prop );
+	}
+
+	private static void bindDiscriminatorProperty(Table table, RootClass entity, Element subnode,
+			Mappings mappings) {
+		SimpleValue discrim = new SimpleValue( table );
+		entity.setDiscriminator( discrim );
+		bindSimpleValue(
+				subnode,
+				discrim,
+				false,
+				RootClass.DEFAULT_DISCRIMINATOR_COLUMN_NAME,
+				mappings
+		);
+		if ( !discrim.isTypeSpecified() ) {
+			discrim.setTypeName( "string" );
+			// ( (Column) discrim.getColumnIterator().next() ).setType(type);
+		}
+		entity.setPolymorphic( true );
+		if ( "true".equals( subnode.attributeValue( "force" ) ) )
+			entity.setForceDiscriminator( true );
+		if ( "false".equals( subnode.attributeValue( "insert" ) ) )
+			entity.setDiscriminatorInsertable( false );
+	}
+
+	public static void bindClass(Element node, PersistentClass persistentClass, Mappings mappings,
+			java.util.Map inheritedMetas) throws MappingException {
+		// transfer an explicitly defined entity name
+		// handle the lazy attribute
+		Attribute lazyNode = node.attribute( "lazy" );
+		boolean lazy = lazyNode == null ?
+				mappings.isDefaultLazy() :
+				"true".equals( lazyNode.getValue() );
+		// go ahead and set the lazy here, since pojo.proxy can override it.
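+		// (note: bindPojoRepresentation below turns laziness back on whenever an
+		// explicit proxy interface is configured for the class)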
+ persistentClass.setLazy( lazy ); + + String entityName = node.attributeValue( "entity-name" ); + if ( entityName == null ) entityName = getClassName( node.attribute("name"), mappings ); + if ( entityName==null ) { + throw new MappingException( "Unable to determine entity name" ); + } + persistentClass.setEntityName( entityName ); + + bindPojoRepresentation( node, persistentClass, mappings, inheritedMetas ); + bindDom4jRepresentation( node, persistentClass, mappings, inheritedMetas ); + bindMapRepresentation( node, persistentClass, mappings, inheritedMetas ); + + bindPersistentClassCommonValues( node, persistentClass, mappings, inheritedMetas ); + + } + + private static void bindPojoRepresentation(Element node, PersistentClass entity, + Mappings mappings, java.util.Map metaTags) { + + String className = getClassName( node.attribute( "name" ), mappings ); + String proxyName = getClassName( node.attribute( "proxy" ), mappings ); + + entity.setClassName( className ); + + if ( proxyName != null ) { + entity.setProxyInterfaceName( proxyName ); + entity.setLazy( true ); + } + else if ( entity.isLazy() ) { + entity.setProxyInterfaceName( className ); + } + + Element tuplizer = locateTuplizerDefinition( node, EntityMode.POJO ); + if ( tuplizer != null ) { + entity.addTuplizer( EntityMode.POJO, tuplizer.attributeValue( "class" ) ); + } + } + + private static void bindDom4jRepresentation(Element node, PersistentClass entity, + Mappings mappings, java.util.Map inheritedMetas) { + String nodeName = node.attributeValue( "node" ); + if (nodeName==null) nodeName = StringHelper.unqualify( entity.getEntityName() ); + entity.setNodeName(nodeName); + + Element tuplizer = locateTuplizerDefinition( node, EntityMode.DOM4J ); + if ( tuplizer != null ) { + entity.addTuplizer( EntityMode.DOM4J, tuplizer.attributeValue( "class" ) ); + } + } + + private static void bindMapRepresentation(Element node, PersistentClass entity, + Mappings mappings, java.util.Map inheritedMetas) { + Element tuplizer = locateTuplizerDefinition( node, EntityMode.MAP ); + if ( tuplizer != null ) { + entity.addTuplizer( EntityMode.MAP, tuplizer.attributeValue( "class" ) ); + } + } + + /** + * Locate any explicit tuplizer definition in the metadata, for the given entity-mode. + * + * @param container The containing element (representing the entity/component) + * @param entityMode The entity-mode for which to locate the tuplizer element + * @return The tuplizer element, or null. + */ + private static Element locateTuplizerDefinition(Element container, EntityMode entityMode) { + Iterator itr = container.elements( "tuplizer" ).iterator(); + while( itr.hasNext() ) { + final Element tuplizerElem = ( Element ) itr.next(); + if ( entityMode.toString().equals( tuplizerElem.attributeValue( "entity-mode") ) ) { + return tuplizerElem; + } + } + return null; + } + + private static void bindPersistentClassCommonValues(Element node, PersistentClass entity, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + // DISCRIMINATOR + Attribute discriminatorNode = node.attribute( "discriminator-value" ); + entity.setDiscriminatorValue( ( discriminatorNode == null ) + ? 
entity.getEntityName() + : discriminatorNode.getValue() ); + + // DYNAMIC UPDATE + Attribute dynamicNode = node.attribute( "dynamic-update" ); + entity.setDynamicUpdate( + dynamicNode != null && "true".equals( dynamicNode.getValue() ) + ); + + // DYNAMIC INSERT + Attribute insertNode = node.attribute( "dynamic-insert" ); + entity.setDynamicInsert( + insertNode != null && "true".equals( insertNode.getValue() ) + ); + + // IMPORT + mappings.addImport( entity.getEntityName(), entity.getEntityName() ); + if ( mappings.isAutoImport() && entity.getEntityName().indexOf( '.' ) > 0 ) { + mappings.addImport( + entity.getEntityName(), + StringHelper.unqualify( entity.getEntityName() ) + ); + } + + // BATCH SIZE + Attribute batchNode = node.attribute( "batch-size" ); + if ( batchNode != null ) entity.setBatchSize( Integer.parseInt( batchNode.getValue() ) ); + + // SELECT BEFORE UPDATE + Attribute sbuNode = node.attribute( "select-before-update" ); + if ( sbuNode != null ) entity.setSelectBeforeUpdate( "true".equals( sbuNode.getValue() ) ); + + // OPTIMISTIC LOCK MODE + Attribute olNode = node.attribute( "optimistic-lock" ); + entity.setOptimisticLockMode( getOptimisticLockMode( olNode ) ); + + entity.setMetaAttributes( getMetas( node, inheritedMetas ) ); + + // PERSISTER + Attribute persisterNode = node.attribute( "persister" ); + if ( persisterNode == null ) { + // persister = SingleTableEntityPersister.class; + } + else { + try { + entity.setEntityPersisterClass( ReflectHelper.classForName( persisterNode + .getValue() ) ); + } + catch (ClassNotFoundException cnfe) { + throw new MappingException( "Could not find persister class: " + + persisterNode.getValue() ); + } + } + + // CUSTOM SQL + handleCustomSQL( node, entity ); + + Iterator tables = node.elementIterator( "synchronize" ); + while ( tables.hasNext() ) { + entity.addSynchronizedTable( ( (Element) tables.next() ).attributeValue( "table" ) ); + } + + Attribute abstractNode = node.attribute( "abstract" ); + Boolean isAbstract = abstractNode == null + ? null + : "true".equals( abstractNode.getValue() ) + ? Boolean.TRUE + : "false".equals( abstractNode.getValue() ) + ? 
Boolean.FALSE + : null; + entity.setAbstract( isAbstract ); + } + + private static void handleCustomSQL(Element node, PersistentClass model) + throws MappingException { + Element element = node.element( "sql-insert" ); + if ( element != null ) { + boolean callable = isCallable( element ); + model.setCustomSQLInsert( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "sql-delete" ); + if ( element != null ) { + boolean callable = isCallable( element ); + model.setCustomSQLDelete( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "sql-update" ); + if ( element != null ) { + boolean callable = isCallable( element ); + model.setCustomSQLUpdate( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "loader" ); + if ( element != null ) { + model.setLoaderName( element.attributeValue( "query-ref" ) ); + } + } + + private static void handleCustomSQL(Element node, Join model) throws MappingException { + Element element = node.element( "sql-insert" ); + if ( element != null ) { + boolean callable = isCallable( element ); + model.setCustomSQLInsert( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "sql-delete" ); + if ( element != null ) { + boolean callable = isCallable( element ); + model.setCustomSQLDelete( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "sql-update" ); + if ( element != null ) { + boolean callable = isCallable( element ); + model.setCustomSQLUpdate( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + } + + private static void handleCustomSQL(Element node, Collection model) throws MappingException { + Element element = node.element( "sql-insert" ); + if ( element != null ) { + boolean callable = isCallable( element, true ); + model.setCustomSQLInsert( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "sql-delete" ); + if ( element != null ) { + boolean callable = isCallable( element, true ); + model.setCustomSQLDelete( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "sql-update" ); + if ( element != null ) { + boolean callable = isCallable( element, true ); + model.setCustomSQLUpdate( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + + element = node.element( "sql-delete-all" ); + if ( element != null ) { + boolean callable = isCallable( element, true ); + model.setCustomSQLDeleteAll( element.getTextTrim(), callable, getResultCheckStyle( element, callable ) ); + } + } + + private static boolean isCallable(Element e) throws MappingException { + return isCallable( e, true ); + } + + private static boolean isCallable(Element element, boolean supportsCallable) + throws MappingException { + Attribute attrib = element.attribute( "callable" ); + if ( attrib != null && "true".equals( attrib.getValue() ) ) { + if ( !supportsCallable ) { + throw new MappingException( "callable attribute not supported yet!" ); + } + return true; + } + return false; + } + + private static ExecuteUpdateResultCheckStyle getResultCheckStyle(Element element, boolean callable) throws MappingException { + Attribute attr = element.attribute( "check" ); + if ( attr == null ) { + // use COUNT as the default. 
This mimics the old behavior, although + // NONE might be a better option moving forward in the case of callable + return ExecuteUpdateResultCheckStyle.COUNT; + } + return ExecuteUpdateResultCheckStyle.parse( attr.getValue() ); + } + + public static void bindUnionSubclass(Element node, UnionSubclass unionSubclass, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + + bindClass( node, unionSubclass, mappings, inheritedMetas ); + inheritedMetas = getMetas( node, inheritedMetas, true ); // get meta's from <union-subclass> + + if ( unionSubclass.getEntityPersisterClass() == null ) { + unionSubclass.getRootClass().setEntityPersisterClass( + UnionSubclassEntityPersister.class ); + } + + Attribute schemaNode = node.attribute( "schema" ); + String schema = schemaNode == null ? + mappings.getSchemaName() : schemaNode.getValue(); + + Attribute catalogNode = node.attribute( "catalog" ); + String catalog = catalogNode == null ? + mappings.getCatalogName() : catalogNode.getValue(); + + Table denormalizedSuperTable = unionSubclass.getSuperclass().getTable(); + Table mytable = mappings.addDenormalizedTable( + schema, + catalog, + getClassTableName(unionSubclass, node, schema, catalog, denormalizedSuperTable, mappings ), + unionSubclass.isAbstract() != null && unionSubclass.isAbstract().booleanValue(), + getSubselect( node ), + denormalizedSuperTable + ); + unionSubclass.setTable( mytable ); + + log.info( + "Mapping union-subclass: " + unionSubclass.getEntityName() + + " -> " + unionSubclass.getTable().getName() + ); + + createClassProperties( node, unionSubclass, mappings, inheritedMetas ); + + } + + public static void bindSubclass(Element node, Subclass subclass, Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + + bindClass( node, subclass, mappings, inheritedMetas ); + inheritedMetas = getMetas( node, inheritedMetas, true ); // get meta's from <subclass> + + if ( subclass.getEntityPersisterClass() == null ) { + subclass.getRootClass() + .setEntityPersisterClass( SingleTableEntityPersister.class ); + } + + log.info( + "Mapping subclass: " + subclass.getEntityName() + + " -> " + subclass.getTable().getName() + ); + + // properties + createClassProperties( node, subclass, mappings, inheritedMetas ); + } + + private static String getClassTableName( + PersistentClass model, Element node, String schema, String catalog, Table denormalizedSuperTable, + Mappings mappings + ) { + Attribute tableNameNode = node.attribute( "table" ); + String logicalTableName; + String physicalTableName; + if ( tableNameNode == null ) { + logicalTableName = StringHelper.unqualify( model.getEntityName() ); + physicalTableName = mappings.getNamingStrategy().classToTableName( model.getEntityName() ); + } + else { + logicalTableName = tableNameNode.getValue(); + physicalTableName = mappings.getNamingStrategy().tableName( logicalTableName ); + } + mappings.addTableBinding( schema, catalog, logicalTableName, physicalTableName, denormalizedSuperTable ); + return physicalTableName; + } + + public static void bindJoinedSubclass(Element node, JoinedSubclass joinedSubclass, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + + bindClass( node, joinedSubclass, mappings, inheritedMetas ); + inheritedMetas = getMetas( node, inheritedMetas, true ); // get meta's from + // <joined-subclass> + + // joined subclasses + if ( joinedSubclass.getEntityPersisterClass() == null ) { + joinedSubclass.getRootClass() + .setEntityPersisterClass( JoinedSubclassEntityPersister.class ); + } + + Attribute schemaNode =
node.attribute( "schema" ); + String schema = schemaNode == null ? + mappings.getSchemaName() : schemaNode.getValue(); + + Attribute catalogNode = node.attribute( "catalog" ); + String catalog = catalogNode == null ? + mappings.getCatalogName() : catalogNode.getValue(); + + Table mytable = mappings.addTable( + schema, + catalog, + getClassTableName( joinedSubclass, node, schema, catalog, null, mappings ), + getSubselect( node ), + false + ); + joinedSubclass.setTable( mytable ); + bindComment(mytable, node); + + log.info( + "Mapping joined-subclass: " + joinedSubclass.getEntityName() + + " -> " + joinedSubclass.getTable().getName() + ); + + // KEY + Element keyNode = node.element( "key" ); + SimpleValue key = new DependantValue( mytable, joinedSubclass.getIdentifier() ); + joinedSubclass.setKey( key ); + key.setCascadeDeleteEnabled( "cascade".equals( keyNode.attributeValue( "on-delete" ) ) ); + bindSimpleValue( keyNode, key, false, joinedSubclass.getEntityName(), mappings ); + + // model.getKey().setType( new Type( model.getIdentifier() ) ); + joinedSubclass.createPrimaryKey(); + joinedSubclass.createForeignKey(); + + // CHECK + Attribute chNode = node.attribute( "check" ); + if ( chNode != null ) mytable.addCheckConstraint( chNode.getValue() ); + + // properties + createClassProperties( node, joinedSubclass, mappings, inheritedMetas ); + + } + + private static void bindJoin(Element node, Join join, Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + + PersistentClass persistentClass = join.getPersistentClass(); + String path = persistentClass.getEntityName(); + + // TABLENAME + + Attribute schemaNode = node.attribute( "schema" ); + String schema = schemaNode == null ? + mappings.getSchemaName() : schemaNode.getValue(); + Attribute catalogNode = node.attribute( "catalog" ); + String catalog = catalogNode == null ? 
+ mappings.getCatalogName() : catalogNode.getValue(); + Table primaryTable = persistentClass.getTable(); + Table table = mappings.addTable( + schema, + catalog, + getClassTableName( persistentClass, node, schema, catalog, primaryTable, mappings ), + getSubselect( node ), + false + ); + join.setTable( table ); + bindComment(table, node); + + Attribute fetchNode = node.attribute( "fetch" ); + if ( fetchNode != null ) { + join.setSequentialSelect( "select".equals( fetchNode.getValue() ) ); + } + + Attribute invNode = node.attribute( "inverse" ); + if ( invNode != null ) { + join.setInverse( "true".equals( invNode.getValue() ) ); + } + + Attribute nullNode = node.attribute( "optional" ); + if ( nullNode != null ) { + join.setOptional( "true".equals( nullNode.getValue() ) ); + } + + log.info( + "Mapping class join: " + persistentClass.getEntityName() + + " -> " + join.getTable().getName() + ); + + // KEY + Element keyNode = node.element( "key" ); + SimpleValue key = new DependantValue( table, persistentClass.getIdentifier() ); + join.setKey( key ); + key.setCascadeDeleteEnabled( "cascade".equals( keyNode.attributeValue( "on-delete" ) ) ); + bindSimpleValue( keyNode, key, false, persistentClass.getEntityName(), mappings ); + + // join.getKey().setType( new Type( lazz.getIdentifier() ) ); + join.createPrimaryKey(); + join.createForeignKey(); + + // PROPERTIES + Iterator iter = node.elementIterator(); + while ( iter.hasNext() ) { + Element subnode = (Element) iter.next(); + String name = subnode.getName(); + String propertyName = subnode.attributeValue( "name" ); + + Value value = null; + if ( "many-to-one".equals( name ) ) { + value = new ManyToOne( table ); + bindManyToOne( subnode, (ManyToOne) value, propertyName, true, mappings ); + } + else if ( "any".equals( name ) ) { + value = new Any( table ); + bindAny( subnode, (Any) value, true, mappings ); + } + else if ( "property".equals( name ) ) { + value = new SimpleValue( table ); + bindSimpleValue( subnode, (SimpleValue) value, true, propertyName, mappings ); + } + else if ( "component".equals( name ) || "dynamic-component".equals( name ) ) { + String subpath = StringHelper.qualify( path, propertyName ); + value = new Component( join ); + bindComponent( + subnode, + (Component) value, + join.getPersistentClass().getClassName(), + propertyName, + subpath, + true, + false, + mappings, + inheritedMetas, + false + ); + } + + if ( value != null ) { + Property prop = createProperty( value, propertyName, persistentClass + .getEntityName(), subnode, mappings, inheritedMetas ); + prop.setOptional( join.isOptional() ); + join.addProperty( prop ); + } + + } + + // CUSTOM SQL + handleCustomSQL( node, join ); + + } + + public static void bindColumns(final Element node, final SimpleValue simpleValue, + final boolean isNullable, final boolean autoColumn, final String propertyPath, + final Mappings mappings) throws MappingException { + + Table table = simpleValue.getTable(); + + // COLUMN(S) + Attribute columnAttribute = node.attribute( "column" ); + if ( columnAttribute == null ) { + Iterator iter = node.elementIterator(); + int count = 0; + while ( iter.hasNext() ) { + Element columnElement = (Element) iter.next(); + if ( columnElement.getName().equals( "column" ) ) { + Column column = new Column(); + column.setValue( simpleValue ); + column.setTypeIndex( count++ ); + bindColumn( columnElement, column, isNullable ); + String logicalColumnName = mappings.getNamingStrategy().logicalColumnName( + columnElement.attributeValue( "name" ), propertyPath + ); + 
column.setName( mappings.getNamingStrategy().columnName( + logicalColumnName ) ); + if ( table != null ) { + table.addColumn( column ); // table=null -> an association + // - fill it in later + //TODO fill in the mappings for table == null + mappings.addColumnBinding( logicalColumnName, column, table ); + } + + + simpleValue.addColumn( column ); + // column index + bindIndex( columnElement.attribute( "index" ), table, column, mappings ); + bindIndex( node.attribute( "index" ), table, column, mappings ); + //column unique-key + bindUniqueKey( columnElement.attribute( "unique-key" ), table, column, mappings ); + bindUniqueKey( node.attribute( "unique-key" ), table, column, mappings ); + } + else if ( columnElement.getName().equals( "formula" ) ) { + Formula formula = new Formula(); + formula.setFormula( columnElement.getText() ); + simpleValue.addFormula( formula ); + } + } + } + else { + if ( node.elementIterator( "column" ).hasNext() ) { + throw new MappingException( + "column attribute may not be used together with <column> subelement" ); + } + if ( node.elementIterator( "formula" ).hasNext() ) { + throw new MappingException( + "column attribute may not be used together with <formula> subelement" ); + } + + Column column = new Column(); + column.setValue( simpleValue ); + bindColumn( node, column, isNullable ); + String logicalColumnName = mappings.getNamingStrategy().logicalColumnName( + columnAttribute.getValue(), propertyPath + ); + column.setName( mappings.getNamingStrategy().columnName( logicalColumnName ) ); + if ( table != null ) { + table.addColumn( column ); // table=null -> an association - fill + // it in later + //TODO fill in the mappings for table == null + mappings.addColumnBinding( logicalColumnName, column, table ); + } + simpleValue.addColumn( column ); + bindIndex( node.attribute( "index" ), table, column, mappings ); + bindUniqueKey( node.attribute( "unique-key" ), table, column, mappings ); + } + + if ( autoColumn && simpleValue.getColumnSpan() == 0 ) { + Column column = new Column(); + column.setValue( simpleValue ); + bindColumn( node, column, isNullable ); + column.setName( mappings.getNamingStrategy().propertyToColumnName( propertyPath ) ); + String logicalName = mappings.getNamingStrategy().logicalColumnName( null, propertyPath ); + mappings.addColumnBinding( logicalName, column, table ); + /* TODO: joinKeyColumnName & foreignKeyColumnName should be called either here or at a + * slightly higher level in the stack (to get all the information we need) + * Right now HbmBinder does not support the + */ + simpleValue.getTable().addColumn( column ); + simpleValue.addColumn( column ); + bindIndex( node.attribute( "index" ), table, column, mappings ); + bindUniqueKey( node.attribute( "unique-key" ), table, column, mappings ); + } + + } + + private static void bindIndex(Attribute indexAttribute, Table table, Column column, Mappings mappings) { + if ( indexAttribute != null && table != null ) { + StringTokenizer tokens = new StringTokenizer( indexAttribute.getValue(), ", " ); + while ( tokens.hasMoreTokens() ) { + table.getOrCreateIndex( tokens.nextToken() ).addColumn( column ); + } + } + } + + private static void bindUniqueKey(Attribute uniqueKeyAttribute, Table table, Column column, Mappings mappings) { + if ( uniqueKeyAttribute != null && table != null ) { + StringTokenizer tokens = new StringTokenizer( uniqueKeyAttribute.getValue(), ", " ); + while ( tokens.hasMoreTokens() ) { + table.getOrCreateUniqueKey( tokens.nextToken() ).addColumn( column ); + } + } + } + + // automatically makes a
column with the default name if none is specified by XML + public static void bindSimpleValue(Element node, SimpleValue simpleValue, boolean isNullable, + String path, Mappings mappings) throws MappingException { + bindSimpleValueType( node, simpleValue, mappings ); + + bindColumnsOrFormula( node, simpleValue, path, isNullable, mappings ); + + Attribute fkNode = node.attribute( "foreign-key" ); + if ( fkNode != null ) simpleValue.setForeignKeyName( fkNode.getValue() ); + } + + private static void bindSimpleValueType(Element node, SimpleValue simpleValue, Mappings mappings) + throws MappingException { + String typeName = null; + + Properties parameters = new Properties(); + + Attribute typeNode = node.attribute( "type" ); + if ( typeNode == null ) typeNode = node.attribute( "id-type" ); // for an any + if ( typeNode != null ) typeName = typeNode.getValue(); + + Element typeChild = node.element( "type" ); + if ( typeName == null && typeChild != null ) { + typeName = typeChild.attribute( "name" ).getValue(); + Iterator typeParameters = typeChild.elementIterator( "param" ); + + while ( typeParameters.hasNext() ) { + Element paramElement = (Element) typeParameters.next(); + parameters.setProperty( + paramElement.attributeValue( "name" ), + paramElement.getTextTrim() + ); + } + } + + TypeDef typeDef = mappings.getTypeDef( typeName ); + if ( typeDef != null ) { + typeName = typeDef.getTypeClass(); + // parameters on the property mapping should + // override parameters in the typedef + Properties allParameters = new Properties(); + allParameters.putAll( typeDef.getParameters() ); + allParameters.putAll( parameters ); + parameters = allParameters; + } + + if ( !parameters.isEmpty() ) simpleValue.setTypeParameters( parameters ); + + if ( typeName != null ) simpleValue.setTypeName( typeName ); + } + + public static void bindProperty( + Element node, + Property property, + Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + + String propName = node.attributeValue( "name" ); + property.setName( propName ); + String nodeName = node.attributeValue( "node" ); + if (nodeName==null) nodeName = propName; + property.setNodeName( nodeName ); + + // TODO: + //Type type = model.getValue().getType(); + //if (type==null) throw new MappingException( + //"Could not determine a property type for: " + model.getName() ); + + Attribute accessNode = node.attribute( "access" ); + if ( accessNode != null ) { + property.setPropertyAccessorName( accessNode.getValue() ); + } + else if ( node.getName().equals( "properties" ) ) { + property.setPropertyAccessorName( "embedded" ); + } + else { + property.setPropertyAccessorName( mappings.getDefaultAccess() ); + } + + Attribute cascadeNode = node.attribute( "cascade" ); + property.setCascade( cascadeNode == null ? mappings.getDefaultCascade() : cascadeNode + .getValue() ); + + Attribute updateNode = node.attribute( "update" ); + property.setUpdateable( updateNode == null || "true".equals( updateNode.getValue() ) ); + + Attribute insertNode = node.attribute( "insert" ); + property.setInsertable( insertNode == null || "true".equals( insertNode.getValue() ) ); + + Attribute lockNode = node.attribute( "optimistic-lock" ); + property.setOptimisticLocked( lockNode == null || "true".equals( lockNode.getValue() ) ); + + Attribute generatedNode = node.attribute( "generated" ); + String generationName = generatedNode == null ?
null : generatedNode.getValue(); + PropertyGeneration generation = PropertyGeneration.parse( generationName ); + property.setGeneration( generation ); + + if ( generation == PropertyGeneration.ALWAYS || generation == PropertyGeneration.INSERT ) { + // generated properties can *never* be insertable... + if ( property.isInsertable() ) { + if ( insertNode == null ) { + // insertable simply because the user did not specify + // anything; just override it + property.setInsertable( false ); + } + else { + // the user specifically supplied insert="true", + // which constitutes an illegal combo + throw new MappingException( + "cannot specify both insert=\"true\" and generated=\"" + generation.getName() + + "\" for property: " + + propName + ); + } + } + + // properties generated on update can never be updateable... + if ( property.isUpdateable() && generation == PropertyGeneration.ALWAYS ) { + if ( updateNode == null ) { + // updateable only because the user did not specify + // anything; just override it + property.setUpdateable( false ); + } + else { + // the user specifically supplied update="true", + // which constitutes an illegal combo + throw new MappingException( + "cannot specify both update=\"true\" and generated=\"" + generation.getName() + + "\" for property: " + + propName + ); + } + } + } + + boolean isLazyable = "property".equals( node.getName() ) || + "component".equals( node.getName() ) || + "many-to-one".equals( node.getName() ) || + "one-to-one".equals( node.getName() ) || + "any".equals( node.getName() ); + if ( isLazyable ) { + Attribute lazyNode = node.attribute( "lazy" ); + property.setLazy( lazyNode != null && "true".equals( lazyNode.getValue() ) ); + } + + if ( log.isDebugEnabled() ) { + String msg = "Mapped property: " + property.getName(); + String columns = columns( property.getValue() ); + if ( columns.length() > 0 ) msg += " -> " + columns; + // TODO: this fails if we run with debug on!
+ // if ( model.getType()!=null ) msg += ", type: " + model.getType().getName(); + log.debug( msg ); + } + + property.setMetaAttributes( getMetas( node, inheritedMetas ) ); + + } + + private static String columns(Value val) { + StringBuffer columns = new StringBuffer(); + Iterator iter = val.getColumnIterator(); + while ( iter.hasNext() ) { + columns.append( ( (Selectable) iter.next() ).getText() ); + if ( iter.hasNext() ) columns.append( ", " ); + } + return columns.toString(); + } + + /** + * Called for all collections + */ + public static void bindCollection(Element node, Collection collection, String className, + String path, Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + + // ROLENAME + collection.setRole(path); + + Attribute inverseNode = node.attribute( "inverse" ); + if ( inverseNode != null ) { + collection.setInverse( "true".equals( inverseNode.getValue() ) ); + } + + Attribute mutableNode = node.attribute( "mutable" ); + if ( mutableNode != null ) { + collection.setMutable( !"false".equals( mutableNode.getValue() ) ); + } + + Attribute olNode = node.attribute( "optimistic-lock" ); + collection.setOptimisticLocked( olNode == null || "true".equals( olNode.getValue() ) ); + + Attribute orderNode = node.attribute( "order-by" ); + if ( orderNode != null ) { + if ( Environment.jvmSupportsLinkedHashCollections() || ( collection instanceof Bag ) ) { + collection.setOrderBy( orderNode.getValue() ); + } + else { + log.warn( "Attribute \"order-by\" ignored in JDK1.3 or less" ); + } + } + Attribute whereNode = node.attribute( "where" ); + if ( whereNode != null ) { + collection.setWhere( whereNode.getValue() ); + } + Attribute batchNode = node.attribute( "batch-size" ); + if ( batchNode != null ) { + collection.setBatchSize( Integer.parseInt( batchNode.getValue() ) ); + } + + String nodeName = node.attributeValue( "node" ); + if ( nodeName == null ) nodeName = node.attributeValue( "name" ); + collection.setNodeName( nodeName ); + String embed = node.attributeValue( "embed-xml" ); + collection.setEmbedded( embed==null || "true".equals(embed) ); + + + // PERSISTER + Attribute persisterNode = node.attribute( "persister" ); + if ( persisterNode != null ) { + try { + collection.setCollectionPersisterClass( ReflectHelper.classForName( persisterNode + .getValue() ) ); + } + catch (ClassNotFoundException cnfe) { + throw new MappingException( "Could not find collection persister class: " + + persisterNode.getValue() ); + } + } + + Attribute typeNode = node.attribute( "collection-type" ); + if ( typeNode != null ) { + String typeName = typeNode.getValue(); + TypeDef typeDef = mappings.getTypeDef( typeName ); + if ( typeDef != null ) { + collection.setTypeName( typeDef.getTypeClass() ); + collection.setTypeParameters( typeDef.getParameters() ); + } + else { + collection.setTypeName( typeName ); + } + } + + // FETCH STRATEGY + + initOuterJoinFetchSetting( node, collection ); + + if ( "subselect".equals( node.attributeValue("fetch") ) ) { + collection.setSubselectLoadable(true); + collection.getOwner().setSubselectLoadableCollections(true); + } + + initLaziness( node, collection, mappings, "true", mappings.isDefaultLazy() ); + //TODO: suck this into initLaziness! 
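+ // lazy="extra" requests extra-lazy semantics (illustrative mapping: <set name="children" lazy="extra">);
+ // an extra-lazy collection can answer operations like size() or contains()
+ // with targeted queries instead of initializing the whole collection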
+ if ( "extra".equals( node.attributeValue("lazy") ) ) { + collection.setLazy(true); + collection.setExtraLazy(true); + } + + Element oneToManyNode = node.element( "one-to-many" ); + if ( oneToManyNode != null ) { + OneToMany oneToMany = new OneToMany( collection.getOwner() ); + collection.setElement( oneToMany ); + bindOneToMany( oneToManyNode, oneToMany, mappings ); + // we have to set up the table later!! yuck + } + else { + // TABLE + Attribute tableNode = node.attribute( "table" ); + String tableName; + if ( tableNode != null ) { + tableName = mappings.getNamingStrategy().tableName( tableNode.getValue() ); + } + else { + //tableName = mappings.getNamingStrategy().propertyToTableName( className, path ); + Table ownerTable = collection.getOwner().getTable(); + //TODO mappings.getLogicalTableName(ownerTable) + String logicalOwnerTableName = ownerTable.getName(); + //FIXME we don't have the associated entity table name here, has to be done in a second pass + tableName = mappings.getNamingStrategy().collectionTableName( + collection.getOwner().getEntityName(), + logicalOwnerTableName , + null, + null, + path + ); + } + Attribute schemaNode = node.attribute( "schema" ); + String schema = schemaNode == null ? + mappings.getSchemaName() : schemaNode.getValue(); + + Attribute catalogNode = node.attribute( "catalog" ); + String catalog = catalogNode == null ? + mappings.getCatalogName() : catalogNode.getValue(); + + Table table = mappings.addTable( + schema, + catalog, + tableName, + getSubselect( node ), + false + ); + collection.setCollectionTable( table ); + bindComment(table, node); + + log.info( + "Mapping collection: " + collection.getRole() + + " -> " + collection.getCollectionTable().getName() + ); + } + + // SORT + Attribute sortedAtt = node.attribute( "sort" ); + // unsorted, natural, comparator.class.name + if ( sortedAtt == null || sortedAtt.getValue().equals( "unsorted" ) ) { + collection.setSorted( false ); + } + else { + collection.setSorted( true ); + String comparatorClassName = sortedAtt.getValue(); + if ( !comparatorClassName.equals( "natural" ) ) { + collection.setComparatorClassName(comparatorClassName); + } + } + + // ORPHAN DELETE (used for programmer error detection) + Attribute cascadeAtt = node.attribute( "cascade" ); + if ( cascadeAtt != null && cascadeAtt.getValue().indexOf( "delete-orphan" ) >= 0 ) { + collection.setOrphanDelete( true ); + } + + // CUSTOM SQL + handleCustomSQL( node, collection ); + // set up second pass + if ( collection instanceof List ) { + mappings.addSecondPass( new ListSecondPass( node, mappings, (List) collection, inheritedMetas ) ); + } + else if ( collection instanceof Map ) { + mappings.addSecondPass( new MapSecondPass( node, mappings, (Map) collection, inheritedMetas ) ); + } + else if ( collection instanceof IdentifierCollection ) { + mappings.addSecondPass( new IdentifierCollectionSecondPass( + node, + mappings, + collection, + inheritedMetas + ) ); + } + else { + mappings.addSecondPass( new CollectionSecondPass( node, mappings, collection, inheritedMetas ) ); + } + + Iterator iter = node.elementIterator( "filter" ); + while ( iter.hasNext() ) { + final Element filter = (Element) iter.next(); + parseFilter( filter, collection, mappings ); + } + + Iterator tables = node.elementIterator( "synchronize" ); + while ( tables.hasNext() ) { + collection.getSynchronizedTables().add( + ( (Element) tables.next() ).attributeValue( "table" ) ); + } + + Element element = node.element( "loader" ); + if ( element != null ) { + collection.setLoaderName( 
element.attributeValue( "query-ref" ) ); + } + + collection.setReferencedPropertyName( node.element( "key" ).attributeValue( "property-ref" ) ); + } + + private static void initLaziness( + Element node, + Fetchable fetchable, + Mappings mappings, + String proxyVal, + boolean defaultLazy + ) { + Attribute lazyNode = node.attribute( "lazy" ); + boolean isLazyTrue = lazyNode == null ? + defaultLazy && fetchable.isLazy() : //fetch="join" overrides default laziness + lazyNode.getValue().equals(proxyVal); //fetch="join" overrides default laziness + fetchable.setLazy( isLazyTrue ); + } + + private static void initLaziness( + Element node, + ToOne fetchable, + Mappings mappings, + boolean defaultLazy + ) { + if ( "no-proxy".equals( node.attributeValue( "lazy" ) ) ) { + fetchable.setUnwrapProxy(true); + fetchable.setLazy(true); + //TODO: better to degrade to lazy="false" if uninstrumented + } + else { + initLaziness(node, fetchable, mappings, "proxy", defaultLazy); + } + } + + private static void bindColumnsOrFormula(Element node, SimpleValue simpleValue, String path, + boolean isNullable, Mappings mappings) { + Attribute formulaNode = node.attribute( "formula" ); + if ( formulaNode != null ) { + Formula f = new Formula(); + f.setFormula( formulaNode.getText() ); + simpleValue.addFormula( f ); + } + else { + bindColumns( node, simpleValue, isNullable, true, path, mappings ); + } + } + + private static void bindComment(Table table, Element node) { + Element comment = node.element("comment"); + if (comment!=null) table.setComment( comment.getTextTrim() ); + } + + public static void bindManyToOne(Element node, ManyToOne manyToOne, String path, + boolean isNullable, Mappings mappings) throws MappingException { + + bindColumnsOrFormula( node, manyToOne, path, isNullable, mappings ); + initOuterJoinFetchSetting( node, manyToOne ); + initLaziness( node, manyToOne, mappings, true ); + + Attribute ukName = node.attribute( "property-ref" ); + if ( ukName != null ) { + manyToOne.setReferencedPropertyName( ukName.getValue() ); + } + + manyToOne.setReferencedEntityName( getEntityName( node, mappings ) ); + + String embed = node.attributeValue( "embed-xml" ); + manyToOne.setEmbedded( embed == null || "true".equals( embed ) ); + + String notFound = node.attributeValue( "not-found" ); + manyToOne.setIgnoreNotFound( "ignore".equals( notFound ) ); + + if( ukName != null && !manyToOne.isIgnoreNotFound() ) { + if ( !node.getName().equals("many-to-many") ) { //TODO: really bad, evil hack to fix!!! 
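+ // the property-ref target may belong to a class that is not bound yet, so
+ // resolution is deferred to a second pass run once all classes are processed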
+ mappings.addSecondPass( new ManyToOneSecondPass(manyToOne) ); + } + } + + Attribute fkNode = node.attribute( "foreign-key" ); + if ( fkNode != null ) manyToOne.setForeignKeyName( fkNode.getValue() ); + + validateCascade( node, path ); + } + + private static void validateCascade(Element node, String path) { + String cascade = node.attributeValue("cascade"); + // >=0: "delete-orphan" may be the first (or only) token in the cascade style + if ( cascade!=null && cascade.indexOf("delete-orphan")>=0 ) { + throw new MappingException("single-valued associations do not support orphan delete: " + path); + } + } + + public static void bindAny(Element node, Any any, boolean isNullable, Mappings mappings) + throws MappingException { + any.setIdentifierType( getTypeFromXML( node ) ); + Attribute metaAttribute = node.attribute( "meta-type" ); + if ( metaAttribute != null ) { + any.setMetaType( metaAttribute.getValue() ); + + Iterator iter = node.elementIterator( "meta-value" ); + if ( iter.hasNext() ) { + HashMap values = new HashMap(); + org.hibernate.type.Type metaType = TypeFactory.heuristicType( any.getMetaType() ); + while ( iter.hasNext() ) { + Element metaValue = (Element) iter.next(); + try { + Object value = ( (DiscriminatorType) metaType ).stringToObject( metaValue + .attributeValue( "value" ) ); + String entityName = getClassName( metaValue.attribute( "class" ), mappings ); + values.put( value, entityName ); + } + catch (ClassCastException cce) { + throw new MappingException( "meta-type was not a DiscriminatorType: " + + metaType.getName() ); + } + catch (Exception e) { + throw new MappingException( "could not interpret meta-value", e ); + } + } + any.setMetaValues( values ); + } + + } + + bindColumns( node, any, isNullable, false, null, mappings ); + } + + public static void bindOneToOne(Element node, OneToOne oneToOne, String path, boolean isNullable, + Mappings mappings) throws MappingException { + + bindColumns( node, oneToOne, isNullable, false, null, mappings ); + + Attribute constrNode = node.attribute( "constrained" ); + boolean constrained = constrNode != null && constrNode.getValue().equals( "true" ); + oneToOne.setConstrained( constrained ); + + oneToOne.setForeignKeyType( constrained ?
+ ForeignKeyDirection.FOREIGN_KEY_FROM_PARENT : + ForeignKeyDirection.FOREIGN_KEY_TO_PARENT ); + + initOuterJoinFetchSetting( node, oneToOne ); + initLaziness( node, oneToOne, mappings, true ); + + oneToOne.setEmbedded( "true".equals( node.attributeValue( "embed-xml" ) ) ); + + Attribute fkNode = node.attribute( "foreign-key" ); + if ( fkNode != null ) oneToOne.setForeignKeyName( fkNode.getValue() ); + + Attribute ukName = node.attribute( "property-ref" ); + if ( ukName != null ) oneToOne.setReferencedPropertyName( ukName.getValue() ); + + oneToOne.setPropertyName( node.attributeValue( "name" ) ); + + oneToOne.setReferencedEntityName( getEntityName( node, mappings ) ); + + validateCascade( node, path ); + } + + public static void bindOneToMany(Element node, OneToMany oneToMany, Mappings mappings) + throws MappingException { + + oneToMany.setReferencedEntityName( getEntityName( node, mappings ) ); + + String embed = node.attributeValue( "embed-xml" ); + oneToMany.setEmbedded( embed == null || "true".equals( embed ) ); + + String notFound = node.attributeValue( "not-found" ); + oneToMany.setIgnoreNotFound( "ignore".equals( notFound ) ); + + } + + public static void bindColumn(Element node, Column column, boolean isNullable) { + Attribute lengthNode = node.attribute( "length" ); + if ( lengthNode != null ) column.setLength( Integer.parseInt( lengthNode.getValue() ) ); + Attribute scalNode = node.attribute( "scale" ); + if ( scalNode != null ) column.setScale( Integer.parseInt( scalNode.getValue() ) ); + Attribute precNode = node.attribute( "precision" ); + if ( precNode != null ) column.setPrecision( Integer.parseInt( precNode.getValue() ) ); + + Attribute nullNode = node.attribute( "not-null" ); + column.setNullable( nullNode == null ? isNullable : nullNode.getValue().equals( "false" ) ); + + Attribute unqNode = node.attribute( "unique" ); + if ( unqNode != null ) column.setUnique( unqNode.getValue().equals( "true" ) ); + + column.setCheckConstraint( node.attributeValue( "check" ) ); + column.setDefaultValue( node.attributeValue( "default" ) ); + + Attribute typeNode = node.attribute( "sql-type" ); + if ( typeNode != null ) column.setSqlType( typeNode.getValue() ); + + Element comment = node.element("comment"); + if (comment!=null) column.setComment( comment.getTextTrim() ); + + } + + /** + * Called for arrays and primitive arrays + */ + public static void bindArray(Element node, Array array, String prefix, String path, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + + bindCollection( node, array, prefix, path, mappings, inheritedMetas ); + + Attribute att = node.attribute( "element-class" ); + if ( att != null ) array.setElementClassName( getClassName( att, mappings ) ); + + } + + private static Class reflectedPropertyClass(String className, String propertyName) + throws MappingException { + if ( className == null ) return null; + return ReflectHelper.reflectedPropertyClass( className, propertyName ); + } + + public static void bindComposite(Element node, Component component, String path, + boolean isNullable, Mappings mappings, java.util.Map inheritedMetas) + throws MappingException { + bindComponent( + node, + component, + null, + null, + path, + isNullable, + false, + mappings, + inheritedMetas, + false + ); + } + + public static void bindCompositeId(Element node, Component component, + PersistentClass persistentClass, String propertyName, Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + + component.setKey( true ); + + String 
path = StringHelper.qualify( + persistentClass.getEntityName(), + propertyName == null ? "id" : propertyName ); + + bindComponent( + node, + component, + persistentClass.getClassName(), + propertyName, + path, + false, + node.attribute( "class" ) == null + && propertyName == null, + mappings, + inheritedMetas, + false + ); + + if ( "true".equals( node.attributeValue("mapped") ) ) { + if ( propertyName!=null ) { + throw new MappingException("cannot combine mapped=\"true\" with specified name"); + } + Component mapper = new Component(persistentClass); + bindComponent( + node, + mapper, + persistentClass.getClassName(), + null, + path, + false, + true, + mappings, + inheritedMetas, + true + ); + persistentClass.setIdentifierMapper(mapper); + Property property = new Property(); + property.setName("_identifierMapper"); + property.setNodeName("id"); + property.setUpdateable(false); + property.setInsertable(false); + property.setValue(mapper); + property.setPropertyAccessorName( "embedded" ); + persistentClass.addProperty(property); + } + + } + + public static void bindComponent( + Element node, + Component component, + String ownerClassName, + String parentProperty, + String path, + boolean isNullable, + boolean isEmbedded, + Mappings mappings, + java.util.Map inheritedMetas, + boolean isIdentifierMapper) throws MappingException { + + component.setEmbedded( isEmbedded ); + component.setRoleName( path ); + + inheritedMetas = getMetas( node, inheritedMetas ); + component.setMetaAttributes( inheritedMetas ); + + Attribute classNode = isIdentifierMapper ? null : node.attribute( "class" ); + if ( classNode != null ) { + component.setComponentClassName( getClassName( classNode, mappings ) ); + } + else if ( "dynamic-component".equals( node.getName() ) ) { + component.setDynamic( true ); + } + else if ( isEmbedded ) { + // an "embedded" component (composite ids and unique) + // note that this does not handle nested components + if ( component.getOwner().hasPojoRepresentation() ) { + component.setComponentClassName( component.getOwner().getClassName() ); + } + else { + component.setDynamic(true); + } + } + else { + // todo : again, how *should* this work for non-pojo entities? + if ( component.getOwner().hasPojoRepresentation() ) { + Class reflectedClass = reflectedPropertyClass( ownerClassName, parentProperty ); + if ( reflectedClass != null ) { + component.setComponentClassName( reflectedClass.getName() ); + } + } + else { + component.setDynamic(true); + } + } + + String nodeName = node.attributeValue( "node" ); + if ( nodeName == null ) nodeName = node.attributeValue( "name" ); + if ( nodeName == null ) nodeName = component.getOwner().getNodeName(); + component.setNodeName( nodeName ); + + Iterator iter = node.elementIterator(); + while ( iter.hasNext() ) { + + Element subnode = (Element) iter.next(); + String name = subnode.getName(); + String propertyName = getPropertyName( subnode ); + String subpath = propertyName == null ? 
null : StringHelper + .qualify( path, propertyName ); + + CollectionType collectType = CollectionType.collectionTypeFromString( name ); + Value value = null; + if ( collectType != null ) { + Collection collection = collectType.create( + subnode, + subpath, + component.getOwner(), + mappings, inheritedMetas + ); + mappings.addCollection( collection ); + value = collection; + } + else if ( "many-to-one".equals( name ) || "key-many-to-one".equals( name ) ) { + value = new ManyToOne( component.getTable() ); + String relativePath; + if (isEmbedded) { + relativePath = propertyName; + } + else { + relativePath = subpath.substring( component.getOwner().getEntityName().length() + 1 ); + } + bindManyToOne( subnode, (ManyToOne) value, relativePath, isNullable, mappings ); + } + else if ( "one-to-one".equals( name ) ) { + value = new OneToOne( component.getTable(), component.getOwner() ); + String relativePath; + if (isEmbedded) { + relativePath = propertyName; + } + else { + relativePath = subpath.substring( component.getOwner().getEntityName().length() + 1 ); + } + bindOneToOne( subnode, (OneToOne) value, relativePath, isNullable, mappings ); + } + else if ( "any".equals( name ) ) { + value = new Any( component.getTable() ); + bindAny( subnode, (Any) value, isNullable, mappings ); + } + else if ( "property".equals( name ) || "key-property".equals( name ) ) { + value = new SimpleValue( component.getTable() ); + String relativePath; + if (isEmbedded) { + relativePath = propertyName; + } + else { + relativePath = subpath.substring( component.getOwner().getEntityName().length() + 1 ); + } + bindSimpleValue( subnode, (SimpleValue) value, isNullable, relativePath, mappings ); + } + else if ( "component".equals( name ) + || "dynamic-component".equals( name ) + || "nested-composite-element".equals( name ) ) { + value = new Component( component ); // a nested composite element + bindComponent( + subnode, + (Component) value, + component.getComponentClassName(), + propertyName, + subpath, + isNullable, + isEmbedded, + mappings, + inheritedMetas, + isIdentifierMapper + ); + } + else if ( "parent".equals( name ) ) { + component.setParentProperty( propertyName ); + } + + if ( value != null ) { + Property property = createProperty( value, propertyName, component + .getComponentClassName(), subnode, mappings, inheritedMetas ); + if (isIdentifierMapper) { + property.setInsertable(false); + property.setUpdateable(false); + } + component.addProperty( property ); + } + } + + if ( "true".equals( node.attributeValue( "unique" ) ) ) { + iter = component.getColumnIterator(); + ArrayList cols = new ArrayList(); + while ( iter.hasNext() ) { + cols.add( iter.next() ); + } + component.getOwner().getTable().createUniqueKey( cols ); + } + + iter = node.elementIterator( "tuplizer" ); + while ( iter.hasNext() ) { + final Element tuplizerElem = ( Element ) iter.next(); + EntityMode mode = EntityMode.parse( tuplizerElem.attributeValue( "entity-mode" ) ); + component.addTuplizer( mode, tuplizerElem.attributeValue( "class" ) ); + } + } + + public static String getTypeFromXML(Element node) throws MappingException { + // TODO: handle TypeDefs + Attribute typeNode = node.attribute( "type" ); + if ( typeNode == null ) typeNode = node.attribute( "id-type" ); // for an any + if ( typeNode == null ) return null; // we will have to use reflection + return typeNode.getValue(); + } + + private static void initOuterJoinFetchSetting(Element node, Fetchable model) { + Attribute fetchNode = node.attribute( "fetch" ); + final FetchMode fetchStyle; 
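+ // precedence below: an explicit fetch="join|select" attribute wins; otherwise
+ // the legacy HB 2.1 outer-join attribute is consulted; otherwise per-association
+ // defaults apply (illustrative mapping: <many-to-one name="owner" fetch="join"/>)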
+ boolean lazy = true; + if ( fetchNode == null ) { + Attribute jfNode = node.attribute( "outer-join" ); + if ( jfNode == null ) { + if ( "many-to-many".equals( node.getName() ) ) { + //NOTE SPECIAL CASE: + // default to join and non-lazy for the "second join" + // of the many-to-many + lazy = false; + fetchStyle = FetchMode.JOIN; + } + else if ( "one-to-one".equals( node.getName() ) ) { + //NOTE SPECIAL CASE: + // one-to-one constrained=false cannot be proxied, + // so default to join and non-lazy + lazy = ( (OneToOne) model ).isConstrained(); + fetchStyle = lazy ? FetchMode.DEFAULT : FetchMode.JOIN; + } + else { + fetchStyle = FetchMode.DEFAULT; + } + } + else { + // use old (HB 2.1) defaults if outer-join is specified + String eoj = jfNode.getValue(); + if ( "auto".equals( eoj ) ) { + fetchStyle = FetchMode.DEFAULT; + } + else { + boolean join = "true".equals( eoj ); + fetchStyle = join ? FetchMode.JOIN : FetchMode.SELECT; + } + } + } + else { + boolean join = "join".equals( fetchNode.getValue() ); + //lazy = !join; + fetchStyle = join ? FetchMode.JOIN : FetchMode.SELECT; + } + model.setFetchMode( fetchStyle ); + model.setLazy(lazy); + } + + private static void makeIdentifier(Element node, SimpleValue model, Mappings mappings) { + + // GENERATOR + Element subnode = node.element( "generator" ); + if ( subnode != null ) { + model.setIdentifierGeneratorStrategy( subnode.attributeValue( "class" ) ); + + Properties params = new Properties(); + + if ( mappings.getSchemaName() != null ) { + params.setProperty( PersistentIdentifierGenerator.SCHEMA, mappings.getSchemaName() ); + } + if ( mappings.getCatalogName() != null ) { + params.setProperty( PersistentIdentifierGenerator.CATALOG, mappings.getCatalogName() ); + } + + Iterator iter = subnode.elementIterator( "param" ); + while ( iter.hasNext() ) { + Element childNode = (Element) iter.next(); + params.setProperty( childNode.attributeValue( "name" ), childNode.getText() ); + } + + model.setIdentifierGeneratorProperties( params ); + } + + model.getTable().setIdentifierValue( model ); + + // ID UNSAVED-VALUE + Attribute nullValueNode = node.attribute( "unsaved-value" ); + if ( nullValueNode != null ) { + model.setNullValue( nullValueNode.getValue() ); + } + else { + if ( "assigned".equals( model.getIdentifierGeneratorStrategy() ) ) { + model.setNullValue( "undefined" ); + } + else { + model.setNullValue( null ); + } + } + } + + private static final void makeVersion(Element node, SimpleValue model) { + + // VERSION UNSAVED-VALUE + Attribute nullValueNode = node.attribute( "unsaved-value" ); + if ( nullValueNode != null ) { + model.setNullValue( nullValueNode.getValue() ); + } + else { + model.setNullValue( "undefined" ); + } + + } + + protected static void createClassProperties(Element node, PersistentClass persistentClass, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + createClassProperties(node, persistentClass, mappings, inheritedMetas, null, true, true, false); + } + + protected static void createClassProperties(Element node, PersistentClass persistentClass, + Mappings mappings, java.util.Map inheritedMetas, UniqueKey uniqueKey, + boolean mutable, boolean nullable, boolean naturalId) throws MappingException { + + String entityName = persistentClass.getEntityName(); + Table table = persistentClass.getTable(); + + Iterator iter = node.elementIterator(); + while ( iter.hasNext() ) { + Element subnode = (Element) iter.next(); + String name = subnode.getName(); + String propertyName = subnode.attributeValue( "name" 
); + + CollectionType collectType = CollectionType.collectionTypeFromString( name ); + Value value = null; + if ( collectType != null ) { + Collection collection = collectType.create( + subnode, + StringHelper.qualify( entityName, propertyName ), + persistentClass, + mappings, inheritedMetas + ); + mappings.addCollection( collection ); + value = collection; + } + else if ( "many-to-one".equals( name ) ) { + value = new ManyToOne( table ); + bindManyToOne( subnode, (ManyToOne) value, propertyName, nullable, mappings ); + } + else if ( "any".equals( name ) ) { + value = new Any( table ); + bindAny( subnode, (Any) value, nullable, mappings ); + } + else if ( "one-to-one".equals( name ) ) { + value = new OneToOne( table, persistentClass ); + bindOneToOne( subnode, (OneToOne) value, propertyName, true, mappings ); + } + else if ( "property".equals( name ) ) { + value = new SimpleValue( table ); + bindSimpleValue( subnode, (SimpleValue) value, nullable, propertyName, mappings ); + } + else if ( "component".equals( name ) + || "dynamic-component".equals( name ) + || "properties".equals( name ) ) { + String subpath = StringHelper.qualify( entityName, propertyName ); + value = new Component( persistentClass ); + + bindComponent( + subnode, + (Component) value, + persistentClass.getClassName(), + propertyName, + subpath, + true, + "properties".equals( name ), + mappings, + inheritedMetas, + false + ); + } + else if ( "join".equals( name ) ) { + Join join = new Join(); + join.setPersistentClass( persistentClass ); + bindJoin( subnode, join, mappings, inheritedMetas ); + persistentClass.addJoin( join ); + } + else if ( "subclass".equals( name ) ) { + handleSubclass( persistentClass, mappings, subnode, inheritedMetas ); + } + else if ( "joined-subclass".equals( name ) ) { + handleJoinedSubclass( persistentClass, mappings, subnode, inheritedMetas ); + } + else if ( "union-subclass".equals( name ) ) { + handleUnionSubclass( persistentClass, mappings, subnode, inheritedMetas ); + } + else if ( "filter".equals( name ) ) { + parseFilter( subnode, persistentClass, mappings ); + } + else if ( "natural-id".equals( name ) ) { + UniqueKey uk = new UniqueKey(); + uk.setName("_UniqueKey"); + uk.setTable(table); + //by default, natural-ids are "immutable" (constant) + boolean mutableId = "true".equals( subnode.attributeValue("mutable") ); + createClassProperties( + subnode, + persistentClass, + mappings, + inheritedMetas, + uk, + mutableId, + false, + true + ); + table.addUniqueKey(uk); + } + else if ( "query".equals(name) ) { + bindNamedQuery(subnode, persistentClass.getEntityName(), mappings); + } + else if ( "sql-query".equals(name) ) { + bindNamedSQLQuery(subnode, persistentClass.getEntityName(), mappings); + } + else if ( "resultset".equals(name) ) { + bindResultSetMappingDefinition( subnode, persistentClass.getEntityName(), mappings ); + } + + if ( value != null ) { + Property property = createProperty( value, propertyName, persistentClass + .getClassName(), subnode, mappings, inheritedMetas ); + if ( !mutable ) property.setUpdateable(false); + if ( naturalId ) property.setNaturalIdentifier(true); + persistentClass.addProperty( property ); + if ( uniqueKey!=null ) uniqueKey.addColumns( property.getColumnIterator() ); + } + + } + } + + private static Property createProperty( + final Value value, + final String propertyName, + final String className, + final Element subnode, + final Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + + if ( StringHelper.isEmpty( propertyName ) ) { + 
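+ // every property-like subelement must carry a name attribute
+ // (illustrative mapping: <property name="title"/>); fail fast, naming the class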
throw new MappingException( subnode.getName() + " mapping must define a name attribute [" + className + "]" ); + } + + value.setTypeUsingReflection( className, propertyName ); + + // this is done here 'cos we might only know the type here (ugly!) + // TODO: improve this a lot: + if ( value instanceof ToOne ) { + ToOne toOne = (ToOne) value; + String propertyRef = toOne.getReferencedPropertyName(); + if ( propertyRef != null ) { + mappings.addUniquePropertyReference( toOne.getReferencedEntityName(), propertyRef ); + } + } + else if ( value instanceof Collection ) { + Collection coll = (Collection) value; + String propertyRef = coll.getReferencedPropertyName(); + // not necessarily a *unique* property reference + if ( propertyRef != null ) { + mappings.addPropertyReference( coll.getOwnerEntityName(), propertyRef ); + } + } + + value.createForeignKey(); + Property prop = new Property(); + prop.setValue( value ); + bindProperty( subnode, prop, mappings, inheritedMetas ); + return prop; + } + + private static void handleUnionSubclass(PersistentClass model, Mappings mappings, + Element subnode, java.util.Map inheritedMetas) throws MappingException { + UnionSubclass subclass = new UnionSubclass( model ); + bindUnionSubclass( subnode, subclass, mappings, inheritedMetas ); + model.addSubclass( subclass ); + mappings.addClass( subclass ); + } + + private static void handleJoinedSubclass(PersistentClass model, Mappings mappings, + Element subnode, java.util.Map inheritedMetas) throws MappingException { + JoinedSubclass subclass = new JoinedSubclass( model ); + bindJoinedSubclass( subnode, subclass, mappings, inheritedMetas ); + model.addSubclass( subclass ); + mappings.addClass( subclass ); + } + + private static void handleSubclass(PersistentClass model, Mappings mappings, Element subnode, + java.util.Map inheritedMetas) throws MappingException { + Subclass subclass = new SingleTableSubclass( model ); + bindSubclass( subnode, subclass, mappings, inheritedMetas ); + model.addSubclass( subclass ); + mappings.addClass( subclass ); + } + + /** + * Called for Lists, arrays, primitive arrays + */ + public static void bindListSecondPass(Element node, List list, java.util.Map classes, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + + bindCollectionSecondPass( node, list, classes, mappings, inheritedMetas ); + + Element subnode = node.element( "list-index" ); + if ( subnode == null ) subnode = node.element( "index" ); + SimpleValue iv = new SimpleValue( list.getCollectionTable() ); + bindSimpleValue( + subnode, + iv, + list.isOneToMany(), + IndexedCollection.DEFAULT_INDEX_COLUMN_NAME, + mappings + ); + iv.setTypeName( "integer" ); + list.setIndex( iv ); + String baseIndex = subnode.attributeValue( "base" ); + if ( baseIndex != null ) list.setBaseIndex( Integer.parseInt( baseIndex ) ); + list.setIndexNodeName( subnode.attributeValue("node") ); + + if ( list.isOneToMany() && !list.getKey().isNullable() && !list.isInverse() ) { + String entityName = ( (OneToMany) list.getElement() ).getReferencedEntityName(); + PersistentClass referenced = mappings.getClass( entityName ); + IndexBackref ib = new IndexBackref(); + ib.setName( '_' + node.attributeValue( "name" ) + "IndexBackref" ); + ib.setUpdateable( false ); + ib.setSelectable( false ); + ib.setCollectionRole( list.getRole() ); + ib.setEntityName( list.getOwner().getEntityName() ); + ib.setValue( list.getIndex() ); + // ( (Column) ( (SimpleValue) ic.getIndex() ).getColumnIterator().next() + // ).setNullable(false); +
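+ // a synthetic IndexBackref is attached to the referenced (child) entity so the
+ // index column of a non-inverse, not-null one-to-many list can be written from
+ // the child side even though the child class does not map that column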
referenced.addProperty( ib ); + } + } + + public static void bindIdentifierCollectionSecondPass(Element node, + IdentifierCollection collection, java.util.Map persistentClasses, Mappings mappings, + java.util.Map inheritedMetas) throws MappingException { + + bindCollectionSecondPass( node, collection, persistentClasses, mappings, inheritedMetas ); + + Element subnode = node.element( "collection-id" ); + SimpleValue id = new SimpleValue( collection.getCollectionTable() ); + bindSimpleValue( + subnode, + id, + false, + IdentifierCollection.DEFAULT_IDENTIFIER_COLUMN_NAME, + mappings + ); + collection.setIdentifier( id ); + makeIdentifier( subnode, id, mappings ); + + } + + /** + * Called for Maps + */ + public static void bindMapSecondPass(Element node, Map map, java.util.Map classes, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + + bindCollectionSecondPass( node, map, classes, mappings, inheritedMetas ); + + Iterator iter = node.elementIterator(); + while ( iter.hasNext() ) { + Element subnode = (Element) iter.next(); + String name = subnode.getName(); + + if ( "index".equals( name ) || "map-key".equals( name ) ) { + SimpleValue value = new SimpleValue( map.getCollectionTable() ); + bindSimpleValue( + subnode, + value, + map.isOneToMany(), + IndexedCollection.DEFAULT_INDEX_COLUMN_NAME, + mappings + ); + if ( !value.isTypeSpecified() ) { + throw new MappingException( "map index element must specify a type: " + + map.getRole() ); + } + map.setIndex( value ); + map.setIndexNodeName( subnode.attributeValue("node") ); + } + else if ( "index-many-to-many".equals( name ) || "map-key-many-to-many".equals( name ) ) { + ManyToOne mto = new ManyToOne( map.getCollectionTable() ); + bindManyToOne( + subnode, + mto, + IndexedCollection.DEFAULT_INDEX_COLUMN_NAME, + map.isOneToMany(), + mappings + ); + map.setIndex( mto ); + + } + else if ( "composite-index".equals( name ) || "composite-map-key".equals( name ) ) { + Component component = new Component( map ); + bindComposite( + subnode, + component, + map.getRole() + ".index", + map.isOneToMany(), + mappings, + inheritedMetas + ); + map.setIndex( component ); + } + else if ( "index-many-to-any".equals( name ) ) { + Any any = new Any( map.getCollectionTable() ); + bindAny( subnode, any, map.isOneToMany(), mappings ); + map.setIndex( any ); + } + } + + // TODO: this is a bit of copy/paste from IndexedCollection.createPrimaryKey() + boolean indexIsFormula = false; + Iterator colIter = map.getIndex().getColumnIterator(); + while ( colIter.hasNext() ) { + if ( ( (Selectable) colIter.next() ).isFormula() ) indexIsFormula = true; + } + + if ( map.isOneToMany() && !map.getKey().isNullable() && !map.isInverse() && !indexIsFormula ) { + String entityName = ( (OneToMany) map.getElement() ).getReferencedEntityName(); + PersistentClass referenced = mappings.getClass( entityName ); + IndexBackref ib = new IndexBackref(); + ib.setName( '_' + node.attributeValue( "name" ) + "IndexBackref" ); + ib.setUpdateable( false ); + ib.setSelectable( false ); + ib.setCollectionRole( map.getRole() ); + ib.setEntityName( map.getOwner().getEntityName() ); + ib.setValue( map.getIndex() ); + // ( (Column) ( (SimpleValue) ic.getIndex() ).getColumnIterator().next() + // ).setNullable(false); + referenced.addProperty( ib ); + } + } + + /** + * Called for all collections + */ + public static void bindCollectionSecondPass(Element node, Collection collection, + java.util.Map persistentClasses, Mappings mappings, java.util.Map inheritedMetas) + throws 
MappingException { + + if ( collection.isOneToMany() ) { + OneToMany oneToMany = (OneToMany) collection.getElement(); + String assocClass = oneToMany.getReferencedEntityName(); + PersistentClass persistentClass = (PersistentClass) persistentClasses.get( assocClass ); + if ( persistentClass == null ) { + throw new MappingException( "Association references unmapped class: " + assocClass ); + } + oneToMany.setAssociatedClass( persistentClass ); + collection.setCollectionTable( persistentClass.getTable() ); + + log.info( + "Mapping collection: " + collection.getRole() + + " -> " + collection.getCollectionTable().getName() + ); + } + + // CHECK + Attribute chNode = node.attribute( "check" ); + if ( chNode != null ) { + collection.getCollectionTable().addCheckConstraint( chNode.getValue() ); + } + + // contained elements: + Iterator iter = node.elementIterator(); + while ( iter.hasNext() ) { + Element subnode = (Element) iter.next(); + String name = subnode.getName(); + + if ( "key".equals( name ) ) { + KeyValue keyVal; + String propRef = collection.getReferencedPropertyName(); + if ( propRef == null ) { + keyVal = collection.getOwner().getIdentifier(); + } + else { + keyVal = (KeyValue) collection.getOwner().getReferencedProperty( propRef ).getValue(); + } + SimpleValue key = new DependantValue( collection.getCollectionTable(), keyVal ); + key.setCascadeDeleteEnabled( "cascade" + .equals( subnode.attributeValue( "on-delete" ) ) ); + bindSimpleValue( + subnode, + key, + collection.isOneToMany(), + Collection.DEFAULT_KEY_COLUMN_NAME, + mappings + ); + collection.setKey( key ); + + Attribute notNull = subnode.attribute( "not-null" ); + ( (DependantValue) key ).setNullable( notNull == null + || notNull.getValue().equals( "false" ) ); + Attribute updateable = subnode.attribute( "update" ); + ( (DependantValue) key ).setUpdateable( updateable == null + || updateable.getValue().equals( "true" ) ); + + } + else if ( "element".equals( name ) ) { + SimpleValue elt = new SimpleValue( collection.getCollectionTable() ); + collection.setElement( elt ); + bindSimpleValue( + subnode, + elt, + true, + Collection.DEFAULT_ELEMENT_COLUMN_NAME, + mappings + ); + } + else if ( "many-to-many".equals( name ) ) { + ManyToOne element = new ManyToOne( collection.getCollectionTable() ); + collection.setElement( element ); + bindManyToOne( + subnode, + element, + Collection.DEFAULT_ELEMENT_COLUMN_NAME, + false, + mappings + ); + bindManyToManySubelements( collection, subnode, mappings ); + } + else if ( "composite-element".equals( name ) ) { + Component element = new Component( collection ); + collection.setElement( element ); + bindComposite( + subnode, + element, + collection.getRole() + ".element", + true, + mappings, + inheritedMetas + ); + } + else if ( "many-to-any".equals( name ) ) { + Any element = new Any( collection.getCollectionTable() ); + collection.setElement( element ); + bindAny( subnode, element, true, mappings ); + } + else if ( "cache".equals( name ) ) { + collection.setCacheConcurrencyStrategy( subnode.attributeValue( "usage" ) ); + collection.setCacheRegionName( subnode.attributeValue( "region" ) ); + } + + String nodeName = subnode.attributeValue( "node" ); + if ( nodeName != null ) collection.setElementNodeName( nodeName ); + + } + + if ( collection.isOneToMany() + && !collection.isInverse() + && !collection.getKey().isNullable() ) { + // for non-inverse one-to-many, with a not-null fk, add a backref! 
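+ // the synthetic Backref exposes the foreign-key value on the child entity so
+ // the not-null key column can be written when the child row is inserted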
+ String entityName = ( (OneToMany) collection.getElement() ).getReferencedEntityName(); + PersistentClass referenced = mappings.getClass( entityName ); + Backref prop = new Backref(); + prop.setName( '_' + node.attributeValue( "name" ) + "Backref" ); + prop.setUpdateable( false ); + prop.setSelectable( false ); + prop.setCollectionRole( collection.getRole() ); + prop.setEntityName( collection.getOwner().getEntityName() ); + prop.setValue( collection.getKey() ); + referenced.addProperty( prop ); + } + } + + private static void bindManyToManySubelements( + Collection collection, + Element manyToManyNode, + Mappings model) throws MappingException { + // Bind the where + Attribute where = manyToManyNode.attribute( "where" ); + String whereCondition = where == null ? null : where.getValue(); + collection.setManyToManyWhere( whereCondition ); + + // Bind the order-by + Attribute order = manyToManyNode.attribute( "order-by" ); + String orderFragment = order == null ? null : order.getValue(); + collection.setManyToManyOrdering( orderFragment ); + + // Bind the filters + Iterator filters = manyToManyNode.elementIterator( "filter" ); + if ( ( filters.hasNext() || whereCondition != null ) && + collection.getFetchMode() == FetchMode.JOIN && + collection.getElement().getFetchMode() != FetchMode.JOIN ) { + throw new MappingException( + "many-to-many defining filter or where without join fetching " + + "not valid within collection using join fetching [" + collection.getRole() + "]" + ); + } + while ( filters.hasNext() ) { + final Element filterElement = ( Element ) filters.next(); + final String name = filterElement.attributeValue( "name" ); + String condition = filterElement.getTextTrim(); + if ( StringHelper.isEmpty(condition) ) condition = filterElement.attributeValue( "condition" ); + if ( StringHelper.isEmpty(condition) ) { + condition = model.getFilterDefinition(name).getDefaultFilterCondition(); + } + if ( condition==null) { + throw new MappingException("no filter condition found for filter: " + name); + } + log.debug( + "Applying many-to-many filter [" + name + + "] as [" + condition + + "] to role [" + collection.getRole() + "]" + ); + collection.addManyToManyFilter( name, condition ); + } + } + + public static final FlushMode getFlushMode(String flushMode) { + if ( flushMode == null ) { + return null; + } + else if ( "auto".equals( flushMode ) ) { + return FlushMode.AUTO; + } + else if ( "commit".equals( flushMode ) ) { + return FlushMode.COMMIT; + } + else if ( "never".equals( flushMode ) ) { + return FlushMode.NEVER; + } + else if ( "manual".equals( flushMode ) ) { + return FlushMode.MANUAL; + } + else if ( "always".equals( flushMode ) ) { + return FlushMode.ALWAYS; + } + else { + throw new MappingException( "unknown flushmode" ); + } + } + + private static void bindNamedQuery(Element queryElem, String path, Mappings mappings) { + String queryName = queryElem.attributeValue( "name" ); + if (path!=null) queryName = path + '.' + queryName; + String query = queryElem.getText(); + log.debug( "Named query: " + queryName + " -> " + query ); + + boolean cacheable = "true".equals( queryElem.attributeValue( "cacheable" ) ); + String region = queryElem.attributeValue( "cache-region" ); + Attribute tAtt = queryElem.attribute( "timeout" ); + Integer timeout = tAtt == null ? null : new Integer( tAtt.getValue() ); + Attribute fsAtt = queryElem.attribute( "fetch-size" ); + Integer fetchSize = fsAtt == null ? 
null : new Integer( fsAtt.getValue() ); + Attribute roAttr = queryElem.attribute( "read-only" ); + boolean readOnly = roAttr != null && "true".equals( roAttr.getValue() ); + Attribute cacheModeAtt = queryElem.attribute( "cache-mode" ); + String cacheMode = cacheModeAtt == null ? null : cacheModeAtt.getValue(); + Attribute cmAtt = queryElem.attribute( "comment" ); + String comment = cmAtt == null ? null : cmAtt.getValue(); + + NamedQueryDefinition namedQuery = new NamedQueryDefinition( + query, + cacheable, + region, + timeout, + fetchSize, + getFlushMode( queryElem.attributeValue( "flush-mode" ) ) , + getCacheMode( cacheMode ), + readOnly, + comment, + getParameterTypes(queryElem) + ); + + mappings.addQuery( queryName, namedQuery ); + } + + public static CacheMode getCacheMode(String cacheMode) { + if (cacheMode == null) return null; + if ( "get".equals( cacheMode ) ) return CacheMode.GET; + if ( "ignore".equals( cacheMode ) ) return CacheMode.IGNORE; + if ( "normal".equals( cacheMode ) ) return CacheMode.NORMAL; + if ( "put".equals( cacheMode ) ) return CacheMode.PUT; + if ( "refresh".equals( cacheMode ) ) return CacheMode.REFRESH; + throw new MappingException("Unknown Cache Mode: " + cacheMode); + } + + public static java.util.Map getParameterTypes(Element queryElem) { + java.util.Map result = new SequencedHashMap(); + Iterator iter = queryElem.elementIterator("query-param"); + while ( iter.hasNext() ) { + Element element = (Element) iter.next(); + result.put( + element.attributeValue("name"), + element.attributeValue("type") + ); + } + return result; + } + + private static void bindResultSetMappingDefinition(Element resultSetElem, String path, Mappings mappings) { + mappings.addSecondPass( new ResultSetMappingSecondPass( resultSetElem, path, mappings ) ); + } + + private static void bindNamedSQLQuery(Element queryElem, String path, Mappings mappings) { + mappings.addSecondPass( new NamedSQLQuerySecondPass( queryElem, path, mappings ) ); + } + + private static String getPropertyName(Element node) { + return node.attributeValue( "name" ); + } + + private static PersistentClass getSuperclass(Mappings mappings, Element subnode) + throws MappingException { + String extendsName = subnode.attributeValue( "extends" ); + PersistentClass superModel = mappings.getClass( extendsName ); + if ( superModel == null ) { + String qualifiedExtendsName = getClassName( extendsName, mappings ); + superModel = mappings.getClass( qualifiedExtendsName ); + } + + if ( superModel == null ) { + throw new MappingException( "Cannot extend unmapped class " + extendsName ); + } + return superModel; + } + + static class CollectionSecondPass extends org.hibernate.cfg.CollectionSecondPass { + Element node; + + CollectionSecondPass(Element node, Mappings mappings, Collection collection, java.util.Map inheritedMetas) { + super(mappings, collection, inheritedMetas); + this.node = node; + } + + public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas) + throws MappingException { + HbmBinder.bindCollectionSecondPass( + node, + collection, + persistentClasses, + mappings, + inheritedMetas + ); + } + } + + static class IdentifierCollectionSecondPass extends CollectionSecondPass { + IdentifierCollectionSecondPass(Element node, Mappings mappings, Collection collection, java.util.Map inheritedMetas) { + super( node, mappings, collection, inheritedMetas ); + } + + public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas) + throws MappingException { + 
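+ // Editorial note, not in the original source: second passes such as this one run only after
+ // every <class> element has been bound to a PersistentClass, so the collection binding
+ // delegated to below can resolve its one-to-many target and collection table by entity name.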
+ HbmBinder.bindIdentifierCollectionSecondPass( + node, + (IdentifierCollection) collection, + persistentClasses, + mappings, + inheritedMetas + ); + } + + } + + static class MapSecondPass extends CollectionSecondPass { + MapSecondPass(Element node, Mappings mappings, Map collection, java.util.Map inheritedMetas) { + super( node, mappings, collection, inheritedMetas ); + } + + public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas) + throws MappingException { + HbmBinder.bindMapSecondPass( + node, + (Map) collection, + persistentClasses, + mappings, + inheritedMetas + ); + } + + } + + + static class ManyToOneSecondPass implements SecondPass { + private final ManyToOne manyToOne; + + ManyToOneSecondPass(ManyToOne manyToOne) { + this.manyToOne = manyToOne; + } + + public void doSecondPass(java.util.Map persistentClasses) throws MappingException { + manyToOne.createPropertyRefConstraints(persistentClasses); + } + + } + + static class ListSecondPass extends CollectionSecondPass { + ListSecondPass(Element node, Mappings mappings, List collection, java.util.Map inheritedMetas) { + super( node, mappings, collection, inheritedMetas ); + } + + public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas) + throws MappingException { + HbmBinder.bindListSecondPass( + node, + (List) collection, + persistentClasses, + mappings, + inheritedMetas + ); + } + + } + + // This inner class implements a case statement... perhaps I'm being a bit over-clever here + abstract static class CollectionType { + private String xmlTag; + + public abstract Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException; + + CollectionType(String xmlTag) { + this.xmlTag = xmlTag; + } + + public String toString() { + return xmlTag; + } + + private static final CollectionType MAP = new CollectionType( "map" ) { + public Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + Map map = new Map( owner ); + bindCollection( node, map, owner.getEntityName(), path, mappings, inheritedMetas ); + return map; + } + }; + private static final CollectionType SET = new CollectionType( "set" ) { + public Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + Set set = new Set( owner ); + bindCollection( node, set, owner.getEntityName(), path, mappings, inheritedMetas ); + return set; + } + }; + private static final CollectionType LIST = new CollectionType( "list" ) { + public Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + List list = new List( owner ); + bindCollection( node, list, owner.getEntityName(), path, mappings, inheritedMetas ); + return list; + } + }; + private static final CollectionType BAG = new CollectionType( "bag" ) { + public Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + Bag bag = new Bag( owner ); + bindCollection( node, bag, owner.getEntityName(), path, mappings, inheritedMetas ); + return bag; + } + }; + private static final CollectionType IDBAG = new CollectionType( "idbag" ) { + public Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws
MappingException { + IdentifierBag bag = new IdentifierBag( owner ); + bindCollection( node, bag, owner.getEntityName(), path, mappings, inheritedMetas ); + return bag; + } + }; + private static final CollectionType ARRAY = new CollectionType( "array" ) { + public Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + Array array = new Array( owner ); + bindArray( node, array, owner.getEntityName(), path, mappings, inheritedMetas ); + return array; + } + }; + private static final CollectionType PRIMITIVE_ARRAY = new CollectionType( "primitive-array" ) { + public Collection create(Element node, String path, PersistentClass owner, + Mappings mappings, java.util.Map inheritedMetas) throws MappingException { + PrimitiveArray array = new PrimitiveArray( owner ); + bindArray( node, array, owner.getEntityName(), path, mappings, inheritedMetas ); + return array; + } + }; + private static final HashMap INSTANCES = new HashMap(); + + static { + INSTANCES.put( MAP.toString(), MAP ); + INSTANCES.put( BAG.toString(), BAG ); + INSTANCES.put( IDBAG.toString(), IDBAG ); + INSTANCES.put( SET.toString(), SET ); + INSTANCES.put( LIST.toString(), LIST ); + INSTANCES.put( ARRAY.toString(), ARRAY ); + INSTANCES.put( PRIMITIVE_ARRAY.toString(), PRIMITIVE_ARRAY ); + } + + public static CollectionType collectionTypeFromString(String xmlTagName) { + return (CollectionType) INSTANCES.get( xmlTagName ); + } + } + + private static int getOptimisticLockMode(Attribute olAtt) throws MappingException { + + if ( olAtt == null ) return Versioning.OPTIMISTIC_LOCK_VERSION; + String olMode = olAtt.getValue(); + if ( olMode == null || "version".equals( olMode ) ) { + return Versioning.OPTIMISTIC_LOCK_VERSION; + } + else if ( "dirty".equals( olMode ) ) { + return Versioning.OPTIMISTIC_LOCK_DIRTY; + } + else if ( "all".equals( olMode ) ) { + return Versioning.OPTIMISTIC_LOCK_ALL; + } + else if ( "none".equals( olMode ) ) { + return Versioning.OPTIMISTIC_LOCK_NONE; + } + else { + throw new MappingException( "Unsupported optimistic-lock style: " + olMode ); + } + } + + private static final java.util.Map getMetas(Element node, java.util.Map inheritedMeta) { + return getMetas( node, inheritedMeta, false ); + } + + public static final java.util.Map getMetas(Element node, java.util.Map inheritedMeta, + boolean onlyInheritable) { + java.util.Map map = new HashMap(); + map.putAll( inheritedMeta ); + + Iterator iter = node.elementIterator( "meta" ); + while ( iter.hasNext() ) { + Element metaNode = (Element) iter.next(); + boolean inheritable = Boolean + .valueOf( metaNode.attributeValue( "inherit" ) ) + .booleanValue(); + if ( onlyInheritable & !inheritable ) { + continue; + } + String name = metaNode.attributeValue( "attribute" ); + + MetaAttribute meta = (MetaAttribute) map.get( name ); + MetaAttribute inheritedAttribute = (MetaAttribute) inheritedMeta.get( name ); + if ( meta == null ) { + meta = new MetaAttribute( name ); + map.put( name, meta ); + } else if (meta == inheritedAttribute) { // overriding inherited meta attribute. HBX-621 & HBX-793 + meta = new MetaAttribute( name ); + map.put( name, meta ); + } + meta.addValue( metaNode.getText() ); + } + return map; + } + + public static String getEntityName(Element elem, Mappings model) { + String entityName = elem.attributeValue( "entity-name" ); + return entityName == null ? 
getClassName( elem.attribute( "class" ), model ) : entityName; + } + + private static String getClassName(Attribute att, Mappings model) { + if ( att == null ) return null; + return getClassName( att.getValue(), model ); + } + + public static String getClassName(String unqualifiedName, Mappings model) { + return getClassName( unqualifiedName, model.getDefaultPackage() ); + } + + public static String getClassName(String unqualifiedName, String defaultPackage) { + if ( unqualifiedName == null ) return null; + if ( unqualifiedName.indexOf( '.' ) < 0 && defaultPackage != null ) { + return defaultPackage + '.' + unqualifiedName; + } + return unqualifiedName; + } + + private static void parseFilterDef(Element element, Mappings mappings) { + String name = element.attributeValue( "name" ); + log.debug( "Parsing filter-def [" + name + "]" ); + String defaultCondition = element.getTextTrim(); + if ( StringHelper.isEmpty( defaultCondition ) ) { + defaultCondition = element.attributeValue( "condition" ); + } + HashMap paramMappings = new HashMap(); + Iterator params = element.elementIterator( "filter-param" ); + while ( params.hasNext() ) { + final Element param = (Element) params.next(); + final String paramName = param.attributeValue( "name" ); + final String paramType = param.attributeValue( "type" ); + log.debug( "adding filter parameter : " + paramName + " -> " + paramType ); + final Type heuristicType = TypeFactory.heuristicType( paramType ); + log.debug( "parameter heuristic type : " + heuristicType ); + paramMappings.put( paramName, heuristicType ); + } + log.debug( "Parsed filter-def [" + name + "]" ); + FilterDefinition def = new FilterDefinition( name, defaultCondition, paramMappings ); + mappings.addFilterDefinition( def ); + } + + private static void parseFilter(Element filterElement, Filterable filterable, Mappings model) { + final String name = filterElement.attributeValue( "name" ); + String condition = filterElement.getTextTrim(); + if ( StringHelper.isEmpty(condition) ) { + condition = filterElement.attributeValue( "condition" ); + } + //TODO: bad implementation, cos it depends upon ordering of mapping doc + // fixing this requires that Collection/PersistentClass gain access + // to the Mappings reference from Configuration (or the filterDefinitions + // map directly) sometime during Configuration.buildSessionFactory + // (after all the types/filter-defs are known and before building + // persisters). + if ( StringHelper.isEmpty(condition) ) { + condition = model.getFilterDefinition(name).getDefaultFilterCondition(); + } + if ( condition==null) { + throw new MappingException("no filter condition found for filter: " + name); + } + log.debug( "Applying filter [" + name + "] as [" + condition + "]" ); + filterable.addFilter( name, condition ); + } + + private static String getSubselect(Element element) { + String subselect = element.attributeValue( "subselect" ); + if ( subselect != null ) { + return subselect; + } + else { + Element subselectElement = element.element( "subselect" ); + return subselectElement == null ? null : subselectElement.getText(); + } + } + + /** + * For the given document, locate all extends attributes which refer to + * entities (entity-name or class-name) not defined within said document. + * + * @param doc The document to check + * @param mappings The already processed mappings. + * @return The list of unresolved extends names. 
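+ * <p>
+ * Illustrative case (hypothetical entity names, added editorially): if this document declares
+ * <subclass name="CreditPayment" extends="Payment"/> but neither this document nor the already
+ * processed mappings define Payment, then "Payment" appears in the returned list so binding of
+ * this document can be deferred until Payment has been bound.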
+ */ + public static java.util.List getExtendsNeeded(Document doc, Mappings mappings) { + java.util.List extendz = new ArrayList(); + Iterator[] subclasses = new Iterator[3]; + final Element hmNode = doc.getRootElement(); + + Attribute packNode = hmNode.attribute( "package" ); + final String packageName = packNode == null ? null : packNode.getValue(); + if ( packageName != null ) { + mappings.setDefaultPackage( packageName ); + } + + // first, iterate over all elements capable of defining an extends attribute + // collecting all found extends references if they cannot be resolved + // against the already processed mappings. + subclasses[0] = hmNode.elementIterator( "subclass" ); + subclasses[1] = hmNode.elementIterator( "joined-subclass" ); + subclasses[2] = hmNode.elementIterator( "union-subclass" ); + + Iterator iterator = new JoinedIterator( subclasses ); + while ( iterator.hasNext() ) { + final Element element = (Element) iterator.next(); + final String extendsName = element.attributeValue( "extends" ); + // mappings might contain either the "raw" extends name (in the case of + // an entity-name mapping) or a FQN (in the case of a POJO mapping). + if ( mappings.getClass( extendsName ) == null && mappings.getClass( getClassName( extendsName, mappings ) ) == null ) { + extendz.add( extendsName ); + } + } + + if ( !extendz.isEmpty() ) { + // we found some extends attributes referencing entities which were + // not already processed. here we need to locate all entity-names + // and class-names contained in this document itself, making sure + // that these get removed from the extendz list such that only + // extends names which require us to delay processing (i.e. + // external to this document and not yet processed) are contained + // in the returned result + final java.util.Set set = new HashSet( extendz ); + EntityElementHandler handler = new EntityElementHandler() { + public void handleEntity(String entityName, String className, Mappings mappings) { + if ( entityName != null ) { + set.remove( entityName ); + } + else { + String fqn = getClassName( className, packageName ); + set.remove( fqn ); + if ( packageName != null ) { + set.remove( StringHelper.unqualify( fqn ) ); + } + } + } + }; + recognizeEntities( mappings, hmNode, handler ); + extendz.clear(); + extendz.addAll( set ); + } + + return extendz; + } + + /** + * Given an entity-containing-element (startNode) recursively locate all + * entity names defined within that element. 
+ * + * @param mappings The already processed mappings + * @param startNode The containing element + * @param handler The thing that knows what to do whenever we recognize an + * entity-name + */ + private static void recognizeEntities( + Mappings mappings, + final Element startNode, + EntityElementHandler handler) { + Iterator[] classes = new Iterator[4]; + classes[0] = startNode.elementIterator( "class" ); + classes[1] = startNode.elementIterator( "subclass" ); + classes[2] = startNode.elementIterator( "joined-subclass" ); + classes[3] = startNode.elementIterator( "union-subclass" ); + + Iterator classIterator = new JoinedIterator( classes ); + while ( classIterator.hasNext() ) { + Element element = (Element) classIterator.next(); + handler.handleEntity( + element.attributeValue( "entity-name" ), + element.attributeValue( "name" ), + mappings + ); + recognizeEntities( mappings, element, handler ); + } + } + + private static interface EntityElementHandler { + public void handleEntity(String entityName, String className, Mappings mappings); + } +} diff --git a/src/org/hibernate/cfg/ImprovedNamingStrategy.java b/src/org/hibernate/cfg/ImprovedNamingStrategy.java new file mode 100644 index 0000000000..a6f842fc88 --- /dev/null +++ b/src/org/hibernate/cfg/ImprovedNamingStrategy.java @@ -0,0 +1,124 @@ +//$Id$ +package org.hibernate.cfg; + +import java.io.Serializable; + +import org.hibernate.util.StringHelper; +import org.hibernate.AssertionFailure; + +/** + * An improved naming strategy that prefers embedded + * underscores to mixed case names + * @see DefaultNamingStrategy the default strategy + * @author Gavin King + */ +public class ImprovedNamingStrategy implements NamingStrategy, Serializable { + + /** + * A convenient singleton instance + */ + public static final NamingStrategy INSTANCE = new ImprovedNamingStrategy(); + + /** + * Return the unqualified class name, mixed case converted to + * underscores + */ + public String classToTableName(String className) { + return addUnderscores( StringHelper.unqualify(className) ); + } + /** + * Return the full property path with underscore separators, mixed + * case converted to underscores + */ + public String propertyToColumnName(String propertyName) { + return addUnderscores( StringHelper.unqualify(propertyName) ); + } + /** + * Convert mixed case to underscores + */ + public String tableName(String tableName) { + return addUnderscores(tableName); + } + /** + * Convert mixed case to underscores + */ + public String columnName(String columnName) { + return addUnderscores(columnName); + } + + protected static String addUnderscores(String name) { + StringBuffer buf = new StringBuffer( name.replace('.', '_') ); + for (int i=1; i<buf.length()-1; i++) { + if ( + Character.isLowerCase( buf.charAt(i-1) ) && + Character.isUpperCase( buf.charAt(i) ) && + Character.isLowerCase( buf.charAt(i+1) ) + ) { + buf.insert(i++, '_'); + } + } + return buf.toString().toLowerCase(); + } + + public String collectionTableName( + String ownerEntity, String ownerEntityTable, String associatedEntity, String associatedEntityTable, + String propertyName + ) { + return tableName( ownerEntityTable + '_' + propertyToColumnName(propertyName) ); + } + + /** + * Return the argument + */ + public String joinKeyColumnName(String joinedColumn, String joinedTable) { + return columnName( joinedColumn ); + } + + public String foreignKeyColumnName( + String propertyName, String propertyEntityName, String propertyTableName, String referencedColumnName + ) { + String header = propertyName != null ? StringHelper.unqualify( propertyName ) : propertyTableName; + if (header == null) throw new AssertionFailure("NamingStrategy not properly filled"); + return columnName( header ); + } + + public String logicalColumnName(String columnName, String propertyName) { + return StringHelper.isNotEmpty( columnName ) ? columnName : StringHelper.unqualify( propertyName ); + } + + public String logicalCollectionTableName(String tableName, String ownerEntityTable, String associatedEntityTable, String propertyName) { + if ( tableName != null ) { + return tableName; + } + else { + return new StringBuffer(ownerEntityTable).append('_').append( + associatedEntityTable != null ? associatedEntityTable : StringHelper.unqualify( propertyName ) + ).toString(); + } + } + + public String logicalCollectionColumnName(String columnName, String propertyName, String referencedColumn) { + return StringHelper.isNotEmpty( columnName ) ? + columnName : + StringHelper.unqualify( propertyName ) + '_' + referencedColumn; + } +} diff --git a/src/org/hibernate/cfg/Mappings.java b/src/org/hibernate/cfg/Mappings.java new file mode 100644 --- /dev/null +++ b/src/org/hibernate/cfg/Mappings.java +//$Id$ +package org.hibernate.cfg; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.DuplicateMappingException; +import org.hibernate.MappingException; +import org.hibernate.engine.FilterDefinition; +import org.hibernate.engine.NamedQueryDefinition; +import org.hibernate.engine.NamedSQLQueryDefinition; +import org.hibernate.engine.ResultSetMappingDefinition; +import org.hibernate.mapping.AuxiliaryDatabaseObject; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.Column; +import org.hibernate.mapping.DenormalizedTable; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.mapping.Table; +import org.hibernate.mapping.TypeDef; +import org.hibernate.util.StringHelper; + +/** + * A collection of mappings from classes and collections to relational database tables. (Represents a single + * <hibernate-mapping> element.)
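+ * (Editorial sketch of typical use, not from the original javadoc: HbmBinder registers each
+ * bound entity through addClass() and each collection role through addCollection(), and defers
+ * work that needs the complete set of entities by queueing SecondPass objects via addSecondPass().)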
+ * @author Gavin King + */ +public class Mappings implements Serializable { + + private static final Log log = LogFactory.getLog(Mappings.class); + + protected final Map classes; + protected final Map collections; + protected final Map tables; + protected final Map queries; + protected final Map sqlqueries; + protected final Map resultSetMappings; + protected final Map typeDefs; + protected final List secondPasses; + protected final Map imports; + protected String schemaName; + protected String catalogName; + protected String defaultCascade; + protected String defaultPackage; + protected String defaultAccess; + protected boolean autoImport; + protected boolean defaultLazy; + protected final List propertyReferences; + protected final NamingStrategy namingStrategy; + protected final Map filterDefinitions; + protected final List auxiliaryDatabaseObjects; + + protected final Map extendsQueue; +// private final List extendsQueue; + + /** + * Binding table between the logical column name and the name produced by the naming strategy, + * kept per table. Note that when the column name is not set explicitly, the property name is + * used as the logical name. This means that, while theoretically possible through the naming + * strategy contract, it is forbidden for two real columns of the same table to share the same + * logical name. + */ + protected final Map columnNameBindingPerTable; + /** + * Binding between the logical table name and the physical one (i.e. after the naming strategy + * has been applied). + */ + protected final Map tableNameBinding; + + + Mappings( + final Map classes, + final Map collections, + final Map tables, + final Map queries, + final Map sqlqueries, + final Map sqlResultSetMappings, + final Map imports, + final List secondPasses, + final List propertyReferences, + final NamingStrategy namingStrategy, + final Map typeDefs, + final Map filterDefinitions, +// final List extendsQueue, + final Map extendsQueue, + final List auxiliaryDatabaseObjects, + final Map tableNamebinding, + final Map columnNameBindingPerTable + ) { + this.classes = classes; + this.collections = collections; + this.queries = queries; + this.sqlqueries = sqlqueries; + this.resultSetMappings = sqlResultSetMappings; + this.tables = tables; + this.imports = imports; + this.secondPasses = secondPasses; + this.propertyReferences = propertyReferences; + this.namingStrategy = namingStrategy; + this.typeDefs = typeDefs; + this.filterDefinitions = filterDefinitions; + this.extendsQueue = extendsQueue; + this.auxiliaryDatabaseObjects = auxiliaryDatabaseObjects; + this.tableNameBinding = tableNamebinding; + this.columnNameBindingPerTable = columnNameBindingPerTable; + } + + public void addClass(PersistentClass persistentClass) throws MappingException { + Object old = classes.put( persistentClass.getEntityName(), persistentClass ); + if ( old!=null ) { + throw new DuplicateMappingException( "class/entity", persistentClass.getEntityName() ); + } + } + public void addCollection(Collection collection) throws MappingException { + Object old = collections.put( collection.getRole(), collection ); + if ( old!=null ) { + throw new DuplicateMappingException( "collection role", collection.getRole() ); + } + } + public PersistentClass getClass(String className) { + return (PersistentClass) classes.get(className); + } + public Collection getCollection(String role) { + return (Collection) collections.get(role); + } + + public void addImport(String className, String rename) throws MappingException { + String existing = (String) imports.put(rename, className); + if (
existing!=null ) { + if ( existing.equals(className) ) { + log.info( "duplicate import: " + className + "->" + rename ); + } + else { + throw new DuplicateMappingException( + "duplicate import: " + rename + + " refers to both " + className + + " and " + existing + + " (try using auto-import=\"false\")", + "import", + rename + ); + } + } + } + + public Table addTable(String schema, + String catalog, + String name, + String subselect, + boolean isAbstract + ) { + String key = subselect==null ? + Table.qualify(catalog, schema, name) : + subselect; + Table table = (Table) tables.get(key); + + if (table == null) { + table = new Table(); + table.setAbstract(isAbstract); + table.setName(name); + table.setSchema(schema); + table.setCatalog(catalog); + table.setSubselect(subselect); + tables.put(key, table); + } + else { + if (!isAbstract) table.setAbstract(false); + } + + return table; + } + + public Table addDenormalizedTable( + String schema, + String catalog, + String name, + boolean isAbstract, + String subselect, + Table includedTable) + throws MappingException { + String key = subselect==null ? + Table.qualify(catalog, schema, name) : + subselect; + if ( tables.containsKey(key) ) { + throw new DuplicateMappingException("table", name); + } + + Table table = new DenormalizedTable(includedTable); + table.setAbstract(isAbstract); + table.setName(name); + table.setSchema(schema); + table.setCatalog(catalog); + table.setSubselect(subselect); + tables.put(key, table); + return table; + } + + public Table getTable(String schema, String catalog, String name) { + String key = Table.qualify(catalog, schema, name); + return (Table) tables.get(key); + } + + public String getSchemaName() { + return schemaName; + } + + public String getCatalogName() { + return catalogName; + } + + public String getDefaultCascade() { + return defaultCascade; + } + + /** + * Sets the schemaName. + * @param schemaName The schemaName to set + */ + public void setSchemaName(String schemaName) { + this.schemaName = schemaName; + } + + /** + * Sets the catalogName. + * @param catalogName The catalogName to set + */ + public void setCatalogName(String catalogName) { + this.catalogName = catalogName; + } + + /** + * Sets the defaultCascade. + * @param defaultCascade The defaultCascade to set + */ + public void setDefaultCascade(String defaultCascade) { + this.defaultCascade = defaultCascade; + } + + /** + * sets the default access strategy + * @param defaultAccess the default access strategy. 
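+ * (Typical values, noted editorially: "field", "property", or the fully-qualified class name
+ * of a custom PropertyAccessor implementation.)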
+ */ + public void setDefaultAccess(String defaultAccess) { + this.defaultAccess = defaultAccess; + } + + public String getDefaultAccess() { + return defaultAccess; + } + + public void addQuery(String name, NamedQueryDefinition query) throws MappingException { + checkQueryExist(name); + queries.put( name.intern(), query ); + } + + public void addSQLQuery(String name, NamedSQLQueryDefinition query) throws MappingException { + checkQueryExist(name); + sqlqueries.put( name.intern(), query ); + } + + private void checkQueryExist(String name) throws MappingException { + if ( sqlqueries.containsKey(name) || queries.containsKey(name) ) { + throw new DuplicateMappingException("query", name); + } + } + + public void addResultSetMapping(ResultSetMappingDefinition sqlResultSetMapping) { + final String name = sqlResultSetMapping.getName(); + if ( resultSetMappings.containsKey(name) ) { + throw new DuplicateMappingException("resultSet", name); + } + resultSetMappings.put(name, sqlResultSetMapping); + } + + public ResultSetMappingDefinition getResultSetMapping(String name) { + return (ResultSetMappingDefinition) resultSetMappings.get(name); + } + + + public NamedQueryDefinition getQuery(String name) { + return (NamedQueryDefinition) queries.get(name); + } + + public void addSecondPass(SecondPass sp) { + addSecondPass(sp, false); + } + + public void addSecondPass(SecondPass sp, boolean onTopOfTheQueue) { + if (onTopOfTheQueue) { + secondPasses.add(0, sp); + } + else { + secondPasses.add(sp); + } + } + + /** + * Returns the autoImport. + * @return boolean + */ + public boolean isAutoImport() { + return autoImport; + } + + /** + * Sets the autoImport. + * @param autoImport The autoImport to set + */ + public void setAutoImport(boolean autoImport) { + this.autoImport = autoImport; + } + + void addUniquePropertyReference(String referencedClass, String propertyName) { + PropertyReference upr = new PropertyReference(); + upr.referencedClass = referencedClass; + upr.propertyName = propertyName; + upr.unique = true; + propertyReferences.add(upr); + } + + void addPropertyReference(String referencedClass, String propertyName) { + PropertyReference upr = new PropertyReference(); + upr.referencedClass = referencedClass; + upr.propertyName = propertyName; + propertyReferences.add(upr); + } + + private String buildTableNameKey(String schema, String catalog, String finalName) { + StringBuffer keyBuilder = new StringBuffer(); + if (schema != null) keyBuilder.append( schema ); + keyBuilder.append( "."); + if (catalog != null) keyBuilder.append( catalog ); + keyBuilder.append( "."); + keyBuilder.append( finalName ); + return keyBuilder.toString(); + } + + static final class PropertyReference implements Serializable { + String referencedClass; + String propertyName; + boolean unique; + } + + /** + * @return Returns the defaultPackage. + */ + public String getDefaultPackage() { + return defaultPackage; + } + + /** + * @param defaultPackage The defaultPackage to set. 
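+ * (Editorial example: getClassName() uses this to qualify bare class names, so with default
+ * package "org.example" an unqualified "Item" resolves to "org.example.Item"; the package
+ * name here is illustrative.)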
+ */ + public void setDefaultPackage(String defaultPackage) { + this.defaultPackage = defaultPackage; + } + + public NamingStrategy getNamingStrategy() { + return namingStrategy; + } + + public void addTypeDef(String typeName, String typeClass, Properties paramMap) { + TypeDef def = new TypeDef(typeClass, paramMap); + typeDefs.put(typeName, def); + log.debug("Added " + typeName + " with class " + typeClass); + } + + public TypeDef getTypeDef(String typeName) { + return (TypeDef) typeDefs.get(typeName); + } + + public Iterator iterateCollections() { + return collections.values().iterator(); + } + + public Iterator iterateTables() { + return tables.values().iterator(); + } + + public Map getFilterDefinitions() { + return filterDefinitions; + } + + public void addFilterDefinition(FilterDefinition definition) { + filterDefinitions.put( definition.getFilterName(), definition ); + } + + public FilterDefinition getFilterDefinition(String name) { + return (FilterDefinition) filterDefinitions.get(name); + } + + public boolean isDefaultLazy() { + return defaultLazy; + } + public void setDefaultLazy(boolean defaultLazy) { + this.defaultLazy = defaultLazy; + } + + public void addToExtendsQueue(ExtendsQueueEntry entry) { + extendsQueue.put( entry, null ); + } + + public PersistentClass locatePersistentClassByEntityName(String entityName) { + PersistentClass persistentClass = ( PersistentClass ) classes.get( entityName ); + if ( persistentClass == null ) { + String actualEntityName = ( String ) imports.get( entityName ); + if ( StringHelper.isNotEmpty( actualEntityName ) ) { + persistentClass = ( PersistentClass ) classes.get( actualEntityName ); + } + } + return persistentClass; + } + + public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject auxiliaryDatabaseObject) { + auxiliaryDatabaseObjects.add( auxiliaryDatabaseObject ); + } + + public void addTableBinding( + String schema, String catalog, String logicalName, String physicalName, Table denormalizedSuperTable + ) { + String key = buildTableNameKey( schema, catalog, physicalName ); + TableDescription tableDescription = new TableDescription( + logicalName, denormalizedSuperTable + ); + TableDescription oldDescriptor = (TableDescription) tableNameBinding.put( key, tableDescription ); + if ( oldDescriptor != null && ! oldDescriptor.logicalName.equals( logicalName ) ) { + //TODO possibly relax that + throw new MappingException("Same physical table name references several logical table names: " + + physicalName + " => " + "'" + oldDescriptor.logicalName + "' and '" + logicalName + "'"); + } + } + + public void addColumnBinding(String logicalName, Column finalColumn, Table table) { + ColumnNames binding = (ColumnNames) columnNameBindingPerTable.get(table); + if (binding == null) { + binding = new ColumnNames(); + columnNameBindingPerTable.put(table, binding); + } + String oldFinalName = (String) binding.logicalToPhysical.put( + logicalName.toLowerCase(), + finalColumn.getQuotedName() + ); + if ( oldFinalName != null && + ! ( finalColumn.isQuoted() ? + oldFinalName.equals( finalColumn.getQuotedName() ) : + oldFinalName.equalsIgnoreCase( finalColumn.getQuotedName() ) ) ) { + //TODO possibly relax that + throw new MappingException("Same logical column name referenced by different physical ones: " + + table.getName() + "."
+ logicalName + " => '" + oldFinalName + "' and '" + finalColumn.getQuotedName() + "'" ); + } + String oldLogicalName = (String) binding.physicalToLogical.put( + finalColumn.getQuotedName(), + logicalName + ); + if ( oldLogicalName != null && ! oldLogicalName.equals( logicalName ) ) { + //TODO possibly relax that + throw new MappingException("Same physical column represented by different logical column names: " + + table.getName() + "." + finalColumn.getQuotedName() + " => '" + oldLogicalName + "' and '" + logicalName + "'"); + } + } + + private String getLogicalTableName(String schema, String catalog, String physicalName) { + String key = buildTableNameKey( schema, catalog, physicalName ); + TableDescription descriptor = (TableDescription) tableNameBinding.get( key ); + if (descriptor == null) { + throw new MappingException( "Unable to find physical table: " + physicalName); + } + return descriptor.logicalName; + } + + public String getPhysicalColumnName(String logicalName, Table table) { + logicalName = logicalName.toLowerCase(); + String finalName = null; + Table currentTable = table; + do { + ColumnNames binding = (ColumnNames) columnNameBindingPerTable.get(currentTable); + if (binding != null) { + finalName = (String) binding.logicalToPhysical.get( logicalName ); + } + String key = buildTableNameKey( currentTable.getSchema(), currentTable.getCatalog(), currentTable.getName() ); + TableDescription description = (TableDescription) tableNameBinding.get(key); + if (description != null) currentTable = description.denormalizedSupertable; + } + while (finalName == null && currentTable != null); + if (finalName == null) { + throw new MappingException( "Unable to find column with logical name " + + logicalName + " in table " + table.getName() ); + } + return finalName; + } + + public String getLogicalColumnName(String physicalName, Table table) { + String logical = null; + Table currentTable = table; + TableDescription description = null; + do { + ColumnNames binding = (ColumnNames) columnNameBindingPerTable.get(currentTable); + if (binding != null) { + logical = (String) binding.physicalToLogical.get( physicalName ); + } + String key = buildTableNameKey( currentTable.getSchema(), currentTable.getCatalog(), currentTable.getName() ); + description = (TableDescription) tableNameBinding.get(key); + if (description != null) currentTable = description.denormalizedSupertable; + } + while (logical == null && currentTable != null && description != null); + if (logical == null) { + throw new MappingException( "Unable to find logical column name from physical name " + + physicalName + " in table " + table.getName() ); + } + return logical; + } + + public String getLogicalTableName(Table table) { + return getLogicalTableName( table.getQuotedSchema(), table.getCatalog(), table.getQuotedName() ); + } + + static public class ColumnNames implements Serializable { + // maps logical column name (lowercased) to physical column name + public Map logicalToPhysical = new HashMap(); + // maps physical column name to logical column name + public Map physicalToLogical = new HashMap(); + public ColumnNames() { + } + } + + static public class TableDescription implements Serializable { + public TableDescription(String logicalName, Table denormalizedSupertable) { + this.logicalName = logicalName; + this.denormalizedSupertable = denormalizedSupertable; + } + + public String logicalName; + public Table denormalizedSupertable; + } +} \ No newline at end of file diff --git a/src/org/hibernate/cfg/NamedSQLQuerySecondPass.java b/src/org/hibernate/cfg/NamedSQLQuerySecondPass.java new file mode 100644 index 0000000000..2c841caa11 ---
/dev/null +++ b/src/org/hibernate/cfg/NamedSQLQuerySecondPass.java @@ -0,0 +1,99 @@ +//$Id$ +package org.hibernate.cfg; + +import java.util.Map; +import java.util.ArrayList; +import java.util.Iterator; + +import org.hibernate.MappingException; +import org.hibernate.util.StringHelper; +import org.hibernate.engine.NamedSQLQueryDefinition; +import org.hibernate.engine.ResultSetMappingDefinition; +import org.dom4j.Attribute; +import org.dom4j.Element; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * @author Emmanuel Bernard + */ +public class NamedSQLQuerySecondPass extends ResultSetMappingBinder implements QuerySecondPass { + private static Log log = LogFactory.getLog( NamedSQLQuerySecondPass.class); + private Element queryElem; + private String path; + private Mappings mappings; + + public NamedSQLQuerySecondPass(Element queryElem, String path, Mappings mappings) { + this.queryElem = queryElem; + this.path = path; + this.mappings = mappings; + } + + public void doSecondPass(Map persistentClasses) throws MappingException { + String queryName = queryElem.attribute( "name" ).getValue(); + if (path!=null) queryName = path + '.' + queryName; + + boolean cacheable = "true".equals( queryElem.attributeValue( "cacheable" ) ); + String region = queryElem.attributeValue( "cache-region" ); + Attribute tAtt = queryElem.attribute( "timeout" ); + Integer timeout = tAtt == null ? null : new Integer( tAtt.getValue() ); + Attribute fsAtt = queryElem.attribute( "fetch-size" ); + Integer fetchSize = fsAtt == null ? null : new Integer( fsAtt.getValue() ); + Attribute roAttr = queryElem.attribute( "read-only" ); + boolean readOnly = roAttr != null && "true".equals( roAttr.getValue() ); + Attribute cacheModeAtt = queryElem.attribute( "cache-mode" ); + String cacheMode = cacheModeAtt == null ? null : cacheModeAtt.getValue(); + Attribute cmAtt = queryElem.attribute( "comment" ); + String comment = cmAtt == null ? null : cmAtt.getValue(); + + java.util.List synchronizedTables = new ArrayList(); + Iterator tables = queryElem.elementIterator( "synchronize" ); + while ( tables.hasNext() ) { + synchronizedTables.add( ( (Element) tables.next() ).attributeValue( "table" ) ); + } + boolean callable = "true".equals( queryElem.attributeValue( "callable" ) ); + + NamedSQLQueryDefinition namedQuery; + Attribute ref = queryElem.attribute( "resultset-ref" ); + String resultSetRef = ref == null ? 
null : ref.getValue(); + if ( StringHelper.isNotEmpty( resultSetRef ) ) { + namedQuery = new NamedSQLQueryDefinition( + queryElem.getText(), + resultSetRef, + synchronizedTables, + cacheable, + region, + timeout, + fetchSize, + HbmBinder.getFlushMode( queryElem.attributeValue( "flush-mode" ) ), + HbmBinder.getCacheMode( cacheMode ), + readOnly, + comment, + HbmBinder.getParameterTypes( queryElem ), + callable + ); + //TODO check that there are no actual definition elements when a ref is defined + } + else { + ResultSetMappingDefinition definition = buildResultSetMappingDefinition( queryElem, path, mappings ); + namedQuery = new NamedSQLQueryDefinition( + queryElem.getText(), + definition.getQueryReturns(), + synchronizedTables, + cacheable, + region, + timeout, + fetchSize, + HbmBinder.getFlushMode( queryElem.attributeValue( "flush-mode" ) ), + HbmBinder.getCacheMode( cacheMode ), + readOnly, + comment, + HbmBinder.getParameterTypes( queryElem ), + callable + ); + } + + log.debug( "Named SQL query: " + queryName + " -> " + namedQuery.getQueryString() ); + mappings.addSQLQuery( queryName, namedQuery ); + } +} diff --git a/src/org/hibernate/cfg/NamingStrategy.java b/src/org/hibernate/cfg/NamingStrategy.java new file mode 100644 index 0000000000..f69b74447b --- /dev/null +++ b/src/org/hibernate/cfg/NamingStrategy.java @@ -0,0 +1,101 @@ +//$Id$ +package org.hibernate.cfg; + +/** + * A set of rules for determining the physical column + * and table names given the information in the mapping + * document. May be used to implement project-scoped + * naming standards for database objects. + * + * #propertyToTableName(String, String) should be replaced by + * {@link #collectionTableName(String,String,String,String,String)} + * + * @see DefaultNamingStrategy + * @see ImprovedNamingStrategy + * @author Gavin King + * @author Emmanuel Bernard + */ +public interface NamingStrategy { + /** + * Return a table name for an entity class + * @param className the fully-qualified class name + * @return a table name + */ + public String classToTableName(String className); + /** + * Return a column name for a property path expression + * @param propertyName a property path + * @return a column name + */ + public String propertyToColumnName(String propertyName); + /** + * Alter the table name given in the mapping document + * @param tableName a table name + * @return a table name + */ + public String tableName(String tableName); + /** + * Alter the column name given in the mapping document + * @param columnName a column name + * @return a column name + */ + public String columnName(String columnName); + /** + * Return a collection table name, i.e. the table name of an association that uses a join table + * + * @param ownerEntity + * @param ownerEntityTable owner side table name + * @param associatedEntity + * @param associatedEntityTable reverse side table name if any + * @param propertyName collection role + */ + public String collectionTableName( + String ownerEntity, String ownerEntityTable, String associatedEntity, String associatedEntityTable, + String propertyName + ); + /** + * Return the join key column name, i.e. a FK column used in a JOINED strategy or for a secondary table + * + * @param joinedColumn joined column name (logical one) used to join with + * @param joinedTable joined table name (i.e. the referenced table) used to join with + */ + public String joinKeyColumnName(String joinedColumn, String joinedTable); + /** + * Return the foreign key column name for the given parameters + * @param propertyName the property name involved
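+ * (for example, added editorially: a strategy given property "customer" referencing
+ * column "id" might produce "customer_id"; the exact result is implementation-defined)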
+ * @param propertyEntityName + * @param propertyTableName the property table name involved (logical one) + * @param referencedColumnName the referenced column name involved (logical one) + */ + public String foreignKeyColumnName( + String propertyName, String propertyEntityName, String propertyTableName, String referencedColumnName + ); + /** + * Return the logical column name used to refer to a column in the metadata + * (like indexes, unique constraints, etc.) + * A full bijection is required between logical names and physical ones; + * logical names have to be case-insensitively unique for a given table + * + * @param columnName given column name if any + * @param propertyName property name of this column + */ + public String logicalColumnName(String columnName, String propertyName); + /** + * Returns the logical collection table name used to refer to a table in the mapping metadata + * + * @param tableName the metadata explicit name + * @param ownerEntityTable owner entity table name (logical one) + * @param associatedEntityTable reverse side table name if any (logical one) + * @param propertyName collection role + */ + public String logicalCollectionTableName(String tableName, String ownerEntityTable, String associatedEntityTable, String propertyName); + + /** + * Returns the logical foreign key column name used to refer to this column in the mapping metadata + * + * @param columnName given column name in the metadata if any + * @param propertyName property name + * @param referencedColumn referenced column name (logical one) in the join + */ + public String logicalCollectionColumnName(String columnName, String propertyName, String referencedColumn); +} diff --git a/src/org/hibernate/cfg/QuerySecondPass.java b/src/org/hibernate/cfg/QuerySecondPass.java new file mode 100644 index 0000000000..d9dd1bad35 --- /dev/null +++ b/src/org/hibernate/cfg/QuerySecondPass.java @@ -0,0 +1,10 @@ +//$Id$ +package org.hibernate.cfg; + +/** + * Bind query + * + * @author Emmanuel Bernard + */ +public interface QuerySecondPass extends SecondPass { +} diff --git a/src/org/hibernate/cfg/ResultSetMappingBinder.java b/src/org/hibernate/cfg/ResultSetMappingBinder.java new file mode 100644 index 0000000000..d07960186a --- /dev/null +++ b/src/org/hibernate/cfg/ResultSetMappingBinder.java @@ -0,0 +1,366 @@ +//$Id$ +package org.hibernate.cfg; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.HashSet; +import java.util.Map; + +import org.dom4j.Element; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.engine.query.sql.NativeSQLQueryCollectionReturn; +import org.hibernate.engine.ResultSetMappingDefinition; +import org.hibernate.engine.query.sql.NativeSQLQueryJoinReturn; +import org.hibernate.engine.query.sql.NativeSQLQueryRootReturn; +import org.hibernate.engine.query.sql.NativeSQLQueryScalarReturn; +import org.hibernate.mapping.Component; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.mapping.Value; +import org.hibernate.mapping.Property; +import org.hibernate.mapping.ToOne; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.CollectionHelper; +import org.hibernate.util.StringHelper; + +/** + * @author Emmanuel Bernard + */ +public abstract class ResultSetMappingBinder { + /** + * Build a ResultSetMappingDefinition given a containing element for the "return-XXX"
elements + * + * @param resultSetElem The element containing the return definitions. + * @param path A prefix to apply to the resultset mapping name, or null. + * @param mappings The current processing state. + * @return The description of the mappings... + */ + protected static ResultSetMappingDefinition buildResultSetMappingDefinition(Element resultSetElem, String path, Mappings mappings) { + String resultSetName = resultSetElem.attribute( "name" ).getValue(); + if ( path != null ) { + resultSetName = path + '.' + resultSetName; + } + ResultSetMappingDefinition definition = new ResultSetMappingDefinition( resultSetName ); + + int cnt = 0; + Iterator returns = resultSetElem.elementIterator(); + while ( returns.hasNext() ) { + cnt++; + Element returnElem = (Element) returns.next(); + String name = returnElem.getName(); + if ( "return-scalar".equals( name ) ) { + String column = returnElem.attributeValue( "column" ); + String typeFromXML = HbmBinder.getTypeFromXML( returnElem ); + Type type = null; + if(typeFromXML!=null) { + type = TypeFactory.heuristicType( typeFromXML ); + if ( type == null ) { + throw new MappingException( "could not determine type " + typeFromXML ); + } + } + definition.addQueryReturn( new NativeSQLQueryScalarReturn( column, type ) ); + } + else if ( "return".equals( name ) ) { + definition.addQueryReturn( bindReturn( returnElem, mappings, cnt ) ); + } + else if ( "return-join".equals( name ) ) { + definition.addQueryReturn( bindReturnJoin( returnElem, mappings ) ); + } + else if ( "load-collection".equals( name ) ) { + definition.addQueryReturn( bindLoadCollection( returnElem, mappings ) ); + } + } + return definition; + } + + private static NativeSQLQueryRootReturn bindReturn(Element returnElem, Mappings mappings, int elementCount) { + String alias = returnElem.attributeValue( "alias" ); + if( StringHelper.isEmpty(alias)) { + alias = "alias_" + elementCount; // hack/workaround as sqlquery impl depends on having a key. + } + + String entityName = HbmBinder.getEntityName(returnElem, mappings); + if(entityName==null) { + throw new MappingException( "<return> must specify either a class or entity-name"); + } + LockMode lockMode = getLockMode( returnElem.attributeValue( "lock-mode" ) ); + + PersistentClass pc = mappings.getClass( entityName ); + java.util.Map propertyResults = bindPropertyResults(alias, returnElem, pc, mappings ); + + return new NativeSQLQueryRootReturn( + alias, + entityName, + propertyResults, + lockMode + ); + } + + private static NativeSQLQueryJoinReturn bindReturnJoin(Element returnElem, Mappings mappings) { + String alias = returnElem.attributeValue( "alias" ); + String roleAttribute = returnElem.attributeValue( "property" ); + LockMode lockMode = getLockMode( returnElem.attributeValue( "lock-mode" ) ); + int dot = roleAttribute.lastIndexOf( '.'
); + if ( dot == -1 ) { + throw new MappingException( + "Role attribute for sql query return [alias=" + alias + + "] not formatted correctly {owningAlias.propertyName}" + ); + } + String roleOwnerAlias = roleAttribute.substring( 0, dot ); + String roleProperty = roleAttribute.substring( dot + 1 ); + + //FIXME: get the PersistentClass + java.util.Map propertyResults = bindPropertyResults(alias, returnElem, null, mappings ); + + return new NativeSQLQueryJoinReturn( + alias, + roleOwnerAlias, + roleProperty, + propertyResults, // TODO: bindpropertyresults(alias, returnElem) + lockMode + ); + } + + private static NativeSQLQueryCollectionReturn bindLoadCollection(Element returnElem, Mappings mappings) { + String alias = returnElem.attributeValue( "alias" ); + String collectionAttribute = returnElem.attributeValue( "role" ); + LockMode lockMode = getLockMode( returnElem.attributeValue( "lock-mode" ) ); + int dot = collectionAttribute.lastIndexOf( '.' ); + if ( dot == -1 ) { + throw new MappingException( + "Collection attribute for sql query return [alias=" + alias + + "] not formatted correctly {OwnerClassName.propertyName}" + ); + } + String ownerClassName = HbmBinder.getClassName( collectionAttribute.substring( 0, dot ), mappings ); + String ownerPropertyName = collectionAttribute.substring( dot + 1 ); + + //FIXME: get the PersistentClass + java.util.Map propertyResults = bindPropertyResults(alias, returnElem, null, mappings ); + + return new NativeSQLQueryCollectionReturn( + alias, + ownerClassName, + ownerPropertyName, + propertyResults, + lockMode + ); + } + + private static java.util.Map bindPropertyResults( + String alias, Element returnElement, PersistentClass pc, Mappings mappings + ) { + + HashMap propertyresults = new HashMap(); // maybe a concrete SQLpropertyresult type, but Map is exactly what is required at the moment + + Element discriminatorResult = returnElement.element("return-discriminator"); + if(discriminatorResult!=null) { + ArrayList resultColumns = getResultColumns(discriminatorResult); + propertyresults.put("class", ArrayHelper.toStringArray(resultColumns) ); + } + Iterator iterator = returnElement.elementIterator("return-property"); + List properties = new ArrayList(); + List propertyNames = new ArrayList(); + while ( iterator.hasNext() ) { + Element propertyresult = (Element) iterator.next(); + String name = propertyresult.attributeValue("name"); + if ( pc == null || name.indexOf( '.') == -1) { //if dotted and not load-collection nor return-join + //regular property + properties.add(propertyresult); + propertyNames.add(name); + } + else { + /** + * Reorder properties + * 1. get the parent property + * 2. list all the properties following the expected one in the parent property + * 3. calculate the lowest index and insert the property + */ + if (pc == null) + throw new MappingException("dotted notation in <return-join> or <load-collection> not yet supported"); + int dotIndex = name.lastIndexOf( '.'
); + String reducedName = name.substring( 0, dotIndex ); + Value value = pc.getRecursiveProperty( reducedName ).getValue(); + Iterator parentPropIter; + if ( value instanceof Component ) { + Component comp = (Component) value; + parentPropIter = comp.getPropertyIterator(); + } + else if ( value instanceof ToOne ) { + ToOne toOne = (ToOne) value; + PersistentClass referencedPc = mappings.getClass( toOne.getReferencedEntityName() ); + if ( toOne.getReferencedPropertyName() != null ) { + try { + parentPropIter = ( (Component) referencedPc.getRecursiveProperty( toOne.getReferencedPropertyName() ).getValue() ).getPropertyIterator(); + } catch (ClassCastException e) { + throw new MappingException("dotted notation references neither a component nor a many-to-one/one-to-one", e); + } + } + else { + try { + if ( referencedPc.getIdentifierMapper() == null ) { + parentPropIter = ( (Component) referencedPc.getIdentifierProperty().getValue() ).getPropertyIterator(); + } + else { + parentPropIter = referencedPc.getIdentifierMapper().getPropertyIterator(); + } + } + catch (ClassCastException e) { + throw new MappingException("dotted notation references neither a component nor a many-to-one/one-to-one", e); + } + } + } + else { + throw new MappingException("dotted notation references neither a component nor a many-to-one/one-to-one"); + } + boolean hasFollowers = false; + List followers = new ArrayList(); + while ( parentPropIter.hasNext() ) { + String currentPropertyName = ( (Property) parentPropIter.next() ).getName(); + String currentName = reducedName + '.' + currentPropertyName; + if (hasFollowers) { + followers.add( currentName ); + } + if ( name.equals( currentName ) ) hasFollowers = true; + } + + int index = propertyNames.size(); + int followersSize = followers.size(); + for (int loop = 0 ; loop < followersSize ; loop++) { + String follower = (String) followers.get(loop); + int currentIndex = getIndexOfFirstMatchingProperty(propertyNames, follower); + index = currentIndex != -1 && currentIndex < index ? currentIndex : index; + } + propertyNames.add(index, name); + properties.add(index, propertyresult); + } + } + + Set uniqueReturnProperty = new HashSet(); + iterator = properties.iterator(); + while ( iterator.hasNext() ) { + Element propertyresult = (Element) iterator.next(); + String name = propertyresult.attributeValue("name"); + if ( "class".equals(name) ) { + throw new MappingException( + "class is not a valid property name to use in a <return-property>, use <return-discriminator> instead" + ); + } + //TODO: validate existence of property with the chosen name. (secondpass ) + ArrayList allResultColumns = getResultColumns(propertyresult); + + if ( allResultColumns.isEmpty() ) { + throw new MappingException( + "return-property for alias " + alias + + " must specify at least one column or return-column name" + ); + } + if ( uniqueReturnProperty.contains( name ) ) { + throw new MappingException( + "duplicate return-property for property " + name + + " on alias " + alias + ); + } + uniqueReturnProperty.add(name); + + // the issue here is that for representing an entity collection, + // the collection element values (the property values of the associated entity) + // are represented as 'element.{propertyname}'. Thus the StringHelper.root() + // here puts everything under 'element' (which additionally has significant + // meaning).
Probably what we need to do is to something like this instead: + // String root = StringHelper.root( name ); + // String key = root; // by default + // if ( !root.equals( name ) ) { + // // we had a dot + // if ( !root.equals( alias ) { + // // the root does not apply to the specific alias + // if ( "elements".equals( root ) { + // // we specifically have a representing an entity collection + // // and this is one of that entity's properties + // key = name; + // } + // } + // } + // but I am not clear enough on the intended purpose of this code block, especially + // in relation to the "Reorder properties" code block above... +// String key = StringHelper.root( name ); + String key = name; + ArrayList intermediateResults = (ArrayList) propertyresults.get( key ); + if (intermediateResults == null) { + propertyresults.put( key, allResultColumns ); + } + else { + intermediateResults.addAll( allResultColumns ); + } + } + + Iterator entries = propertyresults.entrySet().iterator(); + while ( entries.hasNext() ) { + Map.Entry entry = (Map.Entry) entries.next(); + if (entry.getValue() instanceof ArrayList) { + ArrayList list = (ArrayList) entry.getValue(); + entry.setValue( list.toArray( new String[ list.size() ] ) ); + } + } + return propertyresults.isEmpty() ? CollectionHelper.EMPTY_MAP : propertyresults; + } + + private static int getIndexOfFirstMatchingProperty(List propertyNames, String follower) { + int propertySize = propertyNames.size(); + for (int propIndex = 0 ; propIndex < propertySize ; propIndex++) { + if ( ( (String) propertyNames.get(propIndex) ).startsWith( follower ) ) { + return propIndex; + } + } + return -1; + } + + private static ArrayList getResultColumns(Element propertyresult) { + String column = unquote(propertyresult.attributeValue("column")); + ArrayList allResultColumns = new ArrayList(); + if(column!=null) allResultColumns.add(column); + Iterator resultColumns = propertyresult.elementIterator("return-column"); + while ( resultColumns.hasNext() ) { + Element element = (Element) resultColumns.next(); + allResultColumns.add( unquote(element.attributeValue("name")) ); + } + return allResultColumns; + } + + private static String unquote(String name) { + if (name!=null && name.charAt(0)=='`') { + name=name.substring( 1, name.length()-1 ); + } + return name; + } + + private static LockMode getLockMode(String lockMode) { + if ( lockMode == null || "read".equals( lockMode ) ) { + return LockMode.READ; + } + else if ( "none".equals( lockMode ) ) { + return LockMode.NONE; + } + else if ( "upgrade".equals( lockMode ) ) { + return LockMode.UPGRADE; + } + else if ( "upgrade-nowait".equals( lockMode ) ) { + return LockMode.UPGRADE_NOWAIT; + } + else if ( "write".equals( lockMode ) ) { + return LockMode.WRITE; + } + else if ( "force".equals( lockMode ) ) { + return LockMode.FORCE; + } + else { + throw new MappingException( "unknown lockmode" ); + } + } +} diff --git a/src/org/hibernate/cfg/ResultSetMappingSecondPass.java b/src/org/hibernate/cfg/ResultSetMappingSecondPass.java new file mode 100644 index 0000000000..260532c494 --- /dev/null +++ b/src/org/hibernate/cfg/ResultSetMappingSecondPass.java @@ -0,0 +1,28 @@ +//$Id$ +package org.hibernate.cfg; + +import java.util.Map; + +import org.dom4j.Element; +import org.hibernate.MappingException; +import org.hibernate.engine.ResultSetMappingDefinition; + +/** + * @author Emmanuel Bernard + */ +public class ResultSetMappingSecondPass extends ResultSetMappingBinder implements QuerySecondPass { + private Element element; + private String 
path; + private Mappings mappings; + + public ResultSetMappingSecondPass(Element element, String path, Mappings mappings) { + this.element = element; + this.path = path; + this.mappings = mappings; + } + + public void doSecondPass(Map persistentClasses) throws MappingException { + ResultSetMappingDefinition definition = buildResultSetMappingDefinition( element, path, mappings); + mappings.addResultSetMapping( definition ); + } +} diff --git a/src/org/hibernate/cfg/SecondPass.java b/src/org/hibernate/cfg/SecondPass.java new file mode 100644 index 0000000000..25e3ada73a --- /dev/null +++ b/src/org/hibernate/cfg/SecondPass.java @@ -0,0 +1,18 @@ +//$Id$ +package org.hibernate.cfg; + +import java.io.Serializable; + +import org.hibernate.MappingException; + +/** + * Second pass operation + * + * @author Emmanuel Bernard + */ +public interface SecondPass extends Serializable { + + void doSecondPass(java.util.Map persistentClasses) + throws MappingException; + +} diff --git a/src/org/hibernate/cfg/Settings.java b/src/org/hibernate/cfg/Settings.java new file mode 100644 index 0000000000..d21956bb8f --- /dev/null +++ b/src/org/hibernate/cfg/Settings.java @@ -0,0 +1,451 @@ +//$Id$ +package org.hibernate.cfg; + +import java.util.Map; + +import org.hibernate.cache.CacheProvider; +import org.hibernate.cache.QueryCacheFactory; +import org.hibernate.connection.ConnectionProvider; +import org.hibernate.dialect.Dialect; +import org.hibernate.hql.QueryTranslatorFactory; +import org.hibernate.jdbc.BatcherFactory; +import org.hibernate.transaction.TransactionFactory; +import org.hibernate.transaction.TransactionManagerLookup; +import org.hibernate.exception.SQLExceptionConverter; +import org.hibernate.EntityMode; +import org.hibernate.ConnectionReleaseMode; + +/** + * Settings that affect the behaviour of Hibernate at runtime. 
+ * + * @author Gavin King + */ +public final class Settings { + + private boolean showSql; + private boolean formatSql; + private Integer maximumFetchDepth; + private Map querySubstitutions; + private Dialect dialect; + private int jdbcBatchSize; + private int defaultBatchFetchSize; + private boolean scrollableResultSetsEnabled; + private boolean getGeneratedKeysEnabled; + private String defaultSchemaName; + private String defaultCatalogName; + private Integer jdbcFetchSize; + private String sessionFactoryName; + private boolean autoCreateSchema; + private boolean autoDropSchema; + private boolean autoUpdateSchema; + private boolean autoValidateSchema; + private boolean queryCacheEnabled; + private boolean structuredCacheEntriesEnabled; + private boolean secondLevelCacheEnabled; + private String cacheRegionPrefix; + private boolean minimalPutsEnabled; + private boolean commentsEnabled; + private boolean statisticsEnabled; + private boolean jdbcBatchVersionedData; + private boolean identifierRollbackEnabled; + private boolean flushBeforeCompletionEnabled; + private boolean autoCloseSessionEnabled; + private ConnectionReleaseMode connectionReleaseMode; + private CacheProvider cacheProvider; + private QueryCacheFactory queryCacheFactory; + private ConnectionProvider connectionProvider; + private TransactionFactory transactionFactory; + private TransactionManagerLookup transactionManagerLookup; + private BatcherFactory batcherFactory; + private QueryTranslatorFactory queryTranslatorFactory; + private SQLExceptionConverter sqlExceptionConverter; + private boolean wrapResultSetsEnabled; + private boolean orderUpdatesEnabled; + private boolean orderInsertsEnabled; + private EntityMode defaultEntityMode; + private boolean dataDefinitionImplicitCommit; + private boolean dataDefinitionInTransactionSupported; + private boolean strictJPAQLCompliance; + private boolean namedQueryStartupCheckingEnabled; +// private BytecodeProvider bytecodeProvider; + + /** + * Package protected constructor + */ + Settings() { + } + + // public getters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public String getDefaultSchemaName() { + return defaultSchemaName; + } + + public String getDefaultCatalogName() { + return defaultCatalogName; + } + + public Dialect getDialect() { + return dialect; + } + + public int getJdbcBatchSize() { + return jdbcBatchSize; + } + + public int getDefaultBatchFetchSize() { + return defaultBatchFetchSize; + } + + public Map getQuerySubstitutions() { + return querySubstitutions; + } + + public boolean isShowSqlEnabled() { + return showSql; + } + + public boolean isFormatSqlEnabled() { + return formatSql; + } + + public boolean isIdentifierRollbackEnabled() { + return identifierRollbackEnabled; + } + + public boolean isScrollableResultSetsEnabled() { + return scrollableResultSetsEnabled; + } + + public boolean isGetGeneratedKeysEnabled() { + return getGeneratedKeysEnabled; + } + + public boolean isMinimalPutsEnabled() { + return minimalPutsEnabled; + } + + public Integer getJdbcFetchSize() { + return jdbcFetchSize; + } + + public ConnectionProvider getConnectionProvider() { + return connectionProvider; + } + + public TransactionFactory getTransactionFactory() { + return transactionFactory; + } + + public String getSessionFactoryName() { + return sessionFactoryName; + } + + public boolean isAutoCreateSchema() { + return autoCreateSchema; + } + + public boolean isAutoDropSchema() { + return autoDropSchema; + } + + public boolean isAutoUpdateSchema() { + return autoUpdateSchema; 
+ } + + public Integer getMaximumFetchDepth() { + return maximumFetchDepth; + } + + public CacheProvider getCacheProvider() { + return cacheProvider; + } + + public TransactionManagerLookup getTransactionManagerLookup() { + return transactionManagerLookup; + } + + public boolean isQueryCacheEnabled() { + return queryCacheEnabled; + } + + public boolean isCommentsEnabled() { + return commentsEnabled; + } + + public boolean isSecondLevelCacheEnabled() { + return secondLevelCacheEnabled; + } + + public String getCacheRegionPrefix() { + return cacheRegionPrefix; + } + + public QueryCacheFactory getQueryCacheFactory() { + return queryCacheFactory; + } + + public boolean isStatisticsEnabled() { + return statisticsEnabled; + } + + public boolean isJdbcBatchVersionedData() { + return jdbcBatchVersionedData; + } + + public boolean isFlushBeforeCompletionEnabled() { + return flushBeforeCompletionEnabled; + } + + public BatcherFactory getBatcherFactory() { + return batcherFactory; + } + + public boolean isAutoCloseSessionEnabled() { + return autoCloseSessionEnabled; + } + + public ConnectionReleaseMode getConnectionReleaseMode() { + return connectionReleaseMode; + } + + public QueryTranslatorFactory getQueryTranslatorFactory() { + return queryTranslatorFactory; + } + + public SQLExceptionConverter getSQLExceptionConverter() { + return sqlExceptionConverter; + } + + public boolean isWrapResultSetsEnabled() { + return wrapResultSetsEnabled; + } + + public boolean isOrderUpdatesEnabled() { + return orderUpdatesEnabled; + } + + public boolean isOrderInsertsEnabled() { + return orderInsertsEnabled; + } + + public boolean isStructuredCacheEntriesEnabled() { + return structuredCacheEntriesEnabled; + } + + public EntityMode getDefaultEntityMode() { + return defaultEntityMode; + } + + public boolean isAutoValidateSchema() { + return autoValidateSchema; + } + + public boolean isDataDefinitionImplicitCommit() { + return dataDefinitionImplicitCommit; + } + + public boolean isDataDefinitionInTransactionSupported() { + return dataDefinitionInTransactionSupported; + } + + public boolean isStrictJPAQLCompliance() { + return strictJPAQLCompliance; + } + + public boolean isNamedQueryStartupCheckingEnabled() { + return namedQueryStartupCheckingEnabled; + } + + + // package protected setters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + void setDefaultSchemaName(String string) { + defaultSchemaName = string; + } + + void setDefaultCatalogName(String string) { + defaultCatalogName = string; + } + + void setDialect(Dialect dialect) { + this.dialect = dialect; + } + + void setJdbcBatchSize(int i) { + jdbcBatchSize = i; + } + + void setDefaultBatchFetchSize(int i) { + defaultBatchFetchSize = i; + } + + void setQuerySubstitutions(Map map) { + querySubstitutions = map; + } + + void setShowSqlEnabled(boolean b) { + showSql = b; + } + + void setFormatSqlEnabled(boolean b) { + formatSql = b; + } + + void setIdentifierRollbackEnabled(boolean b) { + identifierRollbackEnabled = b; + } + + void setMinimalPutsEnabled(boolean b) { + minimalPutsEnabled = b; + } + + void setScrollableResultSetsEnabled(boolean b) { + scrollableResultSetsEnabled = b; + } + + void setGetGeneratedKeysEnabled(boolean b) { + getGeneratedKeysEnabled = b; + } + + void setJdbcFetchSize(Integer integer) { + jdbcFetchSize = integer; + } + + void setConnectionProvider(ConnectionProvider provider) { + connectionProvider = provider; + } + + void setTransactionFactory(TransactionFactory factory) { + transactionFactory = factory; + } + + void 
setSessionFactoryName(String string) { + sessionFactoryName = string; + } + + void setAutoCreateSchema(boolean b) { + autoCreateSchema = b; + } + + void setAutoDropSchema(boolean b) { + autoDropSchema = b; + } + + void setAutoUpdateSchema(boolean b) { + autoUpdateSchema = b; + } + + void setMaximumFetchDepth(Integer i) { + maximumFetchDepth = i; + } + + void setCacheProvider(CacheProvider cacheProvider) { + this.cacheProvider = cacheProvider; + } + + void setTransactionManagerLookup(TransactionManagerLookup lookup) { + transactionManagerLookup = lookup; + } + + void setQueryCacheEnabled(boolean b) { + queryCacheEnabled = b; + } + + void setCommentsEnabled(boolean commentsEnabled) { + this.commentsEnabled = commentsEnabled; + } + + void setSecondLevelCacheEnabled(boolean secondLevelCacheEnabled) { + this.secondLevelCacheEnabled = secondLevelCacheEnabled; + } + + void setCacheRegionPrefix(String cacheRegionPrefix) { + this.cacheRegionPrefix = cacheRegionPrefix; + } + + void setQueryCacheFactory(QueryCacheFactory queryCacheFactory) { + this.queryCacheFactory = queryCacheFactory; + } + + void setStatisticsEnabled(boolean statisticsEnabled) { + this.statisticsEnabled = statisticsEnabled; + } + + void setJdbcBatchVersionedData(boolean jdbcBatchVersionedData) { + this.jdbcBatchVersionedData = jdbcBatchVersionedData; + } + + void setFlushBeforeCompletionEnabled(boolean flushBeforeCompletionEnabled) { + this.flushBeforeCompletionEnabled = flushBeforeCompletionEnabled; + } + + void setBatcherFactory(BatcherFactory batcher) { + this.batcherFactory = batcher; + } + + void setAutoCloseSessionEnabled(boolean autoCloseSessionEnabled) { + this.autoCloseSessionEnabled = autoCloseSessionEnabled; + } + + void setConnectionReleaseMode(ConnectionReleaseMode connectionReleaseMode) { + this.connectionReleaseMode = connectionReleaseMode; + } + + void setQueryTranslatorFactory(QueryTranslatorFactory queryTranslatorFactory) { + this.queryTranslatorFactory = queryTranslatorFactory; + } + + void setSQLExceptionConverter(SQLExceptionConverter sqlExceptionConverter) { + this.sqlExceptionConverter = sqlExceptionConverter; + } + + void setWrapResultSetsEnabled(boolean wrapResultSetsEnabled) { + this.wrapResultSetsEnabled = wrapResultSetsEnabled; + } + + void setOrderUpdatesEnabled(boolean orderUpdatesEnabled) { + this.orderUpdatesEnabled = orderUpdatesEnabled; + } + + void setOrderInsertsEnabled(boolean orderInsertsEnabled) { + this.orderInsertsEnabled = orderInsertsEnabled; + } + + void setStructuredCacheEntriesEnabled(boolean structuredCacheEntriesEnabled) { + this.structuredCacheEntriesEnabled = structuredCacheEntriesEnabled; + } + + void setDefaultEntityMode(EntityMode defaultEntityMode) { + this.defaultEntityMode = defaultEntityMode; + } + + void setAutoValidateSchema(boolean autoValidateSchema) { + this.autoValidateSchema = autoValidateSchema; + } + + void setDataDefinitionImplicitCommit(boolean dataDefinitionImplicitCommit) { + this.dataDefinitionImplicitCommit = dataDefinitionImplicitCommit; + } + + void setDataDefinitionInTransactionSupported(boolean dataDefinitionInTransactionSupported) { + this.dataDefinitionInTransactionSupported = dataDefinitionInTransactionSupported; + } + + void setStrictJPAQLCompliance(boolean strictJPAQLCompliance) { + this.strictJPAQLCompliance = strictJPAQLCompliance; + } + + void setNamedQueryStartupCheckingEnabled(boolean namedQueryStartupCheckingEnabled) { + this.namedQueryStartupCheckingEnabled = namedQueryStartupCheckingEnabled; + } + + +// public BytecodeProvider 
getBytecodeProvider() { +// return bytecodeProvider; +// } +// +// void setBytecodeProvider(BytecodeProvider bytecodeProvider) { +// this.bytecodeProvider = bytecodeProvider; +// } +} diff --git a/src/org/hibernate/cfg/SettingsFactory.java b/src/org/hibernate/cfg/SettingsFactory.java new file mode 100644 index 0000000000..e665f3edb7 --- /dev/null +++ b/src/org/hibernate/cfg/SettingsFactory.java @@ -0,0 +1,429 @@ +//$Id$ +package org.hibernate.cfg; + +import java.io.Serializable; +import java.lang.reflect.Method; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Map; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.ConnectionReleaseMode; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.bytecode.BytecodeProvider; +import org.hibernate.cache.CacheProvider; +import org.hibernate.cache.NoCacheProvider; +import org.hibernate.cache.QueryCacheFactory; +import org.hibernate.connection.ConnectionProvider; +import org.hibernate.connection.ConnectionProviderFactory; +import org.hibernate.dialect.Dialect; +import org.hibernate.dialect.DialectFactory; +import org.hibernate.exception.SQLExceptionConverter; +import org.hibernate.exception.SQLExceptionConverterFactory; +import org.hibernate.hql.QueryTranslatorFactory; +import org.hibernate.jdbc.BatcherFactory; +import org.hibernate.jdbc.BatchingBatcherFactory; +import org.hibernate.jdbc.NonBatchingBatcherFactory; +import org.hibernate.transaction.TransactionFactory; +import org.hibernate.transaction.TransactionFactoryFactory; +import org.hibernate.transaction.TransactionManagerLookup; +import org.hibernate.transaction.TransactionManagerLookupFactory; +import org.hibernate.util.PropertiesHelper; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +/** + * Reads configuration properties and configures a Settings instance. + * + * @author Gavin King + */ +public class SettingsFactory implements Serializable { + + private static final Log log = LogFactory.getLog(SettingsFactory.class); + public static final String DEF_CACHE_PROVIDER = NoCacheProvider.class.getName(); + + protected SettingsFactory() throws HibernateException {} + + public Settings buildSettings(Properties props) { + Settings settings = new Settings(); + + //SessionFactory name: + + String sessionFactoryName = props.getProperty(Environment.SESSION_FACTORY_NAME); + settings.setSessionFactoryName(sessionFactoryName); + + //JDBC and connection settings: + + ConnectionProvider connections = createConnectionProvider(props); + settings.setConnectionProvider(connections); + + //Interrogate JDBC metadata + + String databaseName = null; + int databaseMajorVersion = 0; + boolean metaSupportsScrollable = false; + boolean metaSupportsGetGeneratedKeys = false; + boolean metaSupportsBatchUpdates = false; + boolean metaReportsDDLCausesTxnCommit = false; + boolean metaReportsDDLInTxnSupported = true; + + // 'hibernate.temp.use_jdbc_metadata_defaults' is a temporary magic value. + // The need for it is intended to be alleviated with 3.3 development, thus it is + // not defined as an Environment constant... + // it is used to control whether we should consult the JDBC metadata to determine + // certain Settings default values; it is useful to *not* do this when the database + // may not be available (mainly in tools usage).
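A sketch of how a tool would use this switch (illustrative only, not part of the patch): with metadata interrogation disabled, databaseName stays null, so the dialect cannot be detected from the connection and hibernate.dialect must be supplied explicitly. The anonymous subclass is only there because the constructor above is protected.

    // assumes java.util.Properties and the org.hibernate.cfg classes shown above
    Properties offlineProps = new Properties();
    offlineProps.setProperty( "hibernate.temp.use_jdbc_metadata_defaults", "false" );
    offlineProps.setProperty( "hibernate.dialect", "org.hibernate.dialect.HSQLDialect" );
    Settings settings = new SettingsFactory() {}.buildSettings( offlineProps );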
+ boolean useJdbcMetadata = PropertiesHelper.getBoolean( "hibernate.temp.use_jdbc_metadata_defaults", props, true ); + if ( useJdbcMetadata ) { + try { + Connection conn = connections.getConnection(); + try { + DatabaseMetaData meta = conn.getMetaData(); + databaseName = meta.getDatabaseProductName(); + databaseMajorVersion = getDatabaseMajorVersion(meta); + log.info("RDBMS: " + databaseName + ", version: " + meta.getDatabaseProductVersion() ); + log.info("JDBC driver: " + meta.getDriverName() + ", version: " + meta.getDriverVersion() ); + + metaSupportsScrollable = meta.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE); + metaSupportsBatchUpdates = meta.supportsBatchUpdates(); + metaReportsDDLCausesTxnCommit = meta.dataDefinitionCausesTransactionCommit(); + metaReportsDDLInTxnSupported = !meta.dataDefinitionIgnoredInTransactions(); + + if ( Environment.jvmSupportsGetGeneratedKeys() ) { + try { + Boolean result = (Boolean) DatabaseMetaData.class.getMethod("supportsGetGeneratedKeys", null) + .invoke(meta, null); + metaSupportsGetGeneratedKeys = result.booleanValue(); + } + catch (AbstractMethodError ame) { + metaSupportsGetGeneratedKeys = false; + } + catch (Exception e) { + metaSupportsGetGeneratedKeys = false; + } + } + + } + finally { + connections.closeConnection(conn); + } + } + catch (SQLException sqle) { + log.warn("Could not obtain connection metadata", sqle); + } + catch (UnsupportedOperationException uoe) { + // user supplied JDBC connections + } + } + settings.setDataDefinitionImplicitCommit( metaReportsDDLCausesTxnCommit ); + settings.setDataDefinitionInTransactionSupported( metaReportsDDLInTxnSupported ); + + + //SQL Dialect: + Dialect dialect = determineDialect( props, databaseName, databaseMajorVersion ); + settings.setDialect(dialect); + + //use dialect default properties + final Properties properties = new Properties(); + properties.putAll( dialect.getDefaultProperties() ); + properties.putAll(props); + + // Transaction settings: + + TransactionFactory transactionFactory = createTransactionFactory(properties); + settings.setTransactionFactory(transactionFactory); + settings.setTransactionManagerLookup( createTransactionManagerLookup(properties) ); + + boolean flushBeforeCompletion = PropertiesHelper.getBoolean(Environment.FLUSH_BEFORE_COMPLETION, properties); + log.info("Automatic flush during beforeCompletion(): " + enabledDisabled(flushBeforeCompletion) ); + settings.setFlushBeforeCompletionEnabled(flushBeforeCompletion); + + boolean autoCloseSession = PropertiesHelper.getBoolean(Environment.AUTO_CLOSE_SESSION, properties); + log.info("Automatic session close at end of transaction: " + enabledDisabled(autoCloseSession) ); + settings.setAutoCloseSessionEnabled(autoCloseSession); + + //JDBC and connection settings: + + int batchSize = PropertiesHelper.getInt(Environment.STATEMENT_BATCH_SIZE, properties, 0); + if ( !metaSupportsBatchUpdates ) batchSize = 0; + if (batchSize>0) log.info("JDBC batch size: " + batchSize); + settings.setJdbcBatchSize(batchSize); + boolean jdbcBatchVersionedData = PropertiesHelper.getBoolean(Environment.BATCH_VERSIONED_DATA, properties, false); + if (batchSize>0) log.info("JDBC batch updates for versioned data: " + enabledDisabled(jdbcBatchVersionedData) ); + settings.setJdbcBatchVersionedData(jdbcBatchVersionedData); + settings.setBatcherFactory( createBatcherFactory(properties, batchSize) ); + + boolean useScrollableResultSets = PropertiesHelper.getBoolean(Environment.USE_SCROLLABLE_RESULTSET, properties, metaSupportsScrollable); + 
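For reference, the batch settings read above come from ordinary configuration keys; a minimal sketch of requesting batching (the values are illustrative, the Environment constants are the ones this method reads):

    Properties p = new Properties();
    p.setProperty( Environment.STATEMENT_BATCH_SIZE, "20" );    // hibernate.jdbc.batch_size
    p.setProperty( Environment.BATCH_VERSIONED_DATA, "true" );  // hibernate.jdbc.batch_versioned_data

Note that, as the code above shows, the requested batch size is forced back to 0 whenever the driver's metadata reports that batch updates are unsupported.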
log.info("Scrollable result sets: " + enabledDisabled(useScrollableResultSets) ); + settings.setScrollableResultSetsEnabled(useScrollableResultSets); + + boolean wrapResultSets = PropertiesHelper.getBoolean(Environment.WRAP_RESULT_SETS, properties, false); + log.debug( "Wrap result sets: " + enabledDisabled(wrapResultSets) ); + settings.setWrapResultSetsEnabled(wrapResultSets); + + boolean useGetGeneratedKeys = PropertiesHelper.getBoolean(Environment.USE_GET_GENERATED_KEYS, properties, metaSupportsGetGeneratedKeys); + log.info("JDBC3 getGeneratedKeys(): " + enabledDisabled(useGetGeneratedKeys) ); + settings.setGetGeneratedKeysEnabled(useGetGeneratedKeys); + + Integer statementFetchSize = PropertiesHelper.getInteger(Environment.STATEMENT_FETCH_SIZE, properties); + if (statementFetchSize!=null) log.info("JDBC result set fetch size: " + statementFetchSize); + settings.setJdbcFetchSize(statementFetchSize); + + String releaseModeName = PropertiesHelper.getString( Environment.RELEASE_CONNECTIONS, properties, "auto" ); + log.info( "Connection release mode: " + releaseModeName ); + ConnectionReleaseMode releaseMode; + if ( "auto".equals(releaseModeName) ) { + releaseMode = transactionFactory.getDefaultReleaseMode(); + } + else { + releaseMode = ConnectionReleaseMode.parse( releaseModeName ); + if ( releaseMode == ConnectionReleaseMode.AFTER_STATEMENT && !connections.supportsAggressiveRelease() ) { + log.warn( "Overriding release mode as connection provider does not support 'after_statement'" ); + releaseMode = ConnectionReleaseMode.AFTER_TRANSACTION; + } + } + settings.setConnectionReleaseMode( releaseMode ); + + //SQL Generation settings: + + String defaultSchema = properties.getProperty(Environment.DEFAULT_SCHEMA); + String defaultCatalog = properties.getProperty(Environment.DEFAULT_CATALOG); + if (defaultSchema!=null) log.info("Default schema: " + defaultSchema); + if (defaultCatalog!=null) log.info("Default catalog: " + defaultCatalog); + settings.setDefaultSchemaName(defaultSchema); + settings.setDefaultCatalogName(defaultCatalog); + + Integer maxFetchDepth = PropertiesHelper.getInteger(Environment.MAX_FETCH_DEPTH, properties); + if (maxFetchDepth!=null) log.info("Maximum outer join fetch depth: " + maxFetchDepth); + settings.setMaximumFetchDepth(maxFetchDepth); + int batchFetchSize = PropertiesHelper.getInt(Environment.DEFAULT_BATCH_FETCH_SIZE, properties, 1); + log.info("Default batch fetch size: " + batchFetchSize); + settings.setDefaultBatchFetchSize(batchFetchSize); + + boolean comments = PropertiesHelper.getBoolean(Environment.USE_SQL_COMMENTS, properties); + log.info( "Generate SQL with comments: " + enabledDisabled(comments) ); + settings.setCommentsEnabled(comments); + + boolean orderUpdates = PropertiesHelper.getBoolean( Environment.ORDER_UPDATES, properties ); + log.info( "Order SQL updates by primary key: " + enabledDisabled( orderUpdates ) ); + settings.setOrderUpdatesEnabled( orderUpdates ); + + boolean orderInserts = PropertiesHelper.getBoolean(Environment.ORDER_INSERTS, properties); + log.info( "Order SQL inserts for batching: " + enabledDisabled( orderInserts ) ); + settings.setOrderInsertsEnabled( orderInserts ); + + //Query parser settings: + + settings.setQueryTranslatorFactory( createQueryTranslatorFactory(properties) ); + + Map querySubstitutions = PropertiesHelper.toMap(Environment.QUERY_SUBSTITUTIONS, " ,=;:\n\t\r\f", properties); + log.info("Query language substitutions: " + querySubstitutions); + settings.setQuerySubstitutions(querySubstitutions); + + boolean 
jpaqlCompliance = PropertiesHelper.getBoolean( Environment.JPAQL_STRICT_COMPLIANCE, properties, false ); + settings.setStrictJPAQLCompliance( jpaqlCompliance ); + log.info( "JPA-QL strict compliance: " + enabledDisabled( jpaqlCompliance ) ); + + // Second-level / query cache: + + boolean useSecondLevelCache = PropertiesHelper.getBoolean(Environment.USE_SECOND_LEVEL_CACHE, properties, true); + log.info( "Second-level cache: " + enabledDisabled(useSecondLevelCache) ); + settings.setSecondLevelCacheEnabled(useSecondLevelCache); + + boolean useQueryCache = PropertiesHelper.getBoolean(Environment.USE_QUERY_CACHE, properties); + log.info( "Query cache: " + enabledDisabled(useQueryCache) ); + settings.setQueryCacheEnabled(useQueryCache); + + if ( useSecondLevelCache || useQueryCache ) { + // The cache provider is needed when we either have second-level cache enabled + // or query cache enabled. Note that useSecondLevelCache is enabled by default + settings.setCacheProvider( createCacheProvider( properties ) ); + } + else { + settings.setCacheProvider( new NoCacheProvider() ); + } + + boolean useMinimalPuts = PropertiesHelper.getBoolean( + Environment.USE_MINIMAL_PUTS, properties, settings.getCacheProvider().isMinimalPutsEnabledByDefault() + ); + log.info( "Optimize cache for minimal puts: " + enabledDisabled(useMinimalPuts) ); + settings.setMinimalPutsEnabled(useMinimalPuts); + + String prefix = properties.getProperty(Environment.CACHE_REGION_PREFIX); + if ( StringHelper.isEmpty(prefix) ) prefix=null; + if (prefix!=null) log.info("Cache region prefix: "+ prefix); + settings.setCacheRegionPrefix(prefix); + + boolean useStructuredCacheEntries = PropertiesHelper.getBoolean(Environment.USE_STRUCTURED_CACHE, properties, false); + log.info( "Structured second-level cache entries: " + enabledDisabled(useStructuredCacheEntries) ); + settings.setStructuredCacheEntriesEnabled(useStructuredCacheEntries); + + if (useQueryCache) settings.setQueryCacheFactory( createQueryCacheFactory(properties) ); + + //SQL Exception converter: + + SQLExceptionConverter sqlExceptionConverter; + try { + sqlExceptionConverter = SQLExceptionConverterFactory.buildSQLExceptionConverter( dialect, properties ); + } + catch(HibernateException e) { + log.warn("Error building SQLExceptionConverter; using minimal converter"); + sqlExceptionConverter = SQLExceptionConverterFactory.buildMinimalSQLExceptionConverter(); + } + settings.setSQLExceptionConverter(sqlExceptionConverter); + + //Statistics and logging: + + boolean showSql = PropertiesHelper.getBoolean(Environment.SHOW_SQL, properties); + if (showSql) log.info("Echoing all SQL to stdout"); + settings.setShowSqlEnabled(showSql); + + boolean formatSql = PropertiesHelper.getBoolean(Environment.FORMAT_SQL, properties); + settings.setFormatSqlEnabled(formatSql); + + boolean useStatistics = PropertiesHelper.getBoolean(Environment.GENERATE_STATISTICS, properties); + log.info( "Statistics: " + enabledDisabled(useStatistics) ); + settings.setStatisticsEnabled(useStatistics); + + boolean useIdentifierRollback = PropertiesHelper.getBoolean(Environment.USE_IDENTIFIER_ROLLBACK, properties); + log.info( "Deleted entity synthetic identifier rollback: " + enabledDisabled(useIdentifierRollback) ); + settings.setIdentifierRollbackEnabled(useIdentifierRollback); + + //Schema export: + + String autoSchemaExport = properties.getProperty(Environment.HBM2DDL_AUTO); + if ( "validate".equals(autoSchemaExport) ) settings.setAutoValidateSchema(true); + if ( "update".equals(autoSchemaExport) ) 
settings.setAutoUpdateSchema(true); + if ( "create".equals(autoSchemaExport) ) settings.setAutoCreateSchema(true); + if ( "create-drop".equals(autoSchemaExport) ) { + settings.setAutoCreateSchema(true); + settings.setAutoDropSchema(true); + } + + EntityMode defaultEntityMode = EntityMode.parse( properties.getProperty( Environment.DEFAULT_ENTITY_MODE ) ); + log.info( "Default entity-mode: " + defaultEntityMode ); + settings.setDefaultEntityMode( defaultEntityMode ); + + boolean namedQueryChecking = PropertiesHelper.getBoolean( Environment.QUERY_STARTUP_CHECKING, properties, true ); + log.info( "Named query checking : " + enabledDisabled( namedQueryChecking ) ); + settings.setNamedQueryStartupCheckingEnabled( namedQueryChecking ); + +// String provider = properties.getProperty( Environment.BYTECODE_PROVIDER ); +// log.info( "Bytecode provider name : " + provider ); +// BytecodeProvider bytecodeProvider = buildBytecodeProvider( provider ); +// settings.setBytecodeProvider( bytecodeProvider ); + + return settings; + + } + + protected BytecodeProvider buildBytecodeProvider(String providerName) { + if ( "javassist".equals( providerName ) ) { + return new org.hibernate.bytecode.javassist.BytecodeProviderImpl(); + } + else if ( "cglib".equals( providerName ) ) { + return new org.hibernate.bytecode.cglib.BytecodeProviderImpl(); + } + else { + log.debug( "using cglib as bytecode provider by default" ); + return new org.hibernate.bytecode.cglib.BytecodeProviderImpl(); + } + } + + private int getDatabaseMajorVersion(DatabaseMetaData meta) { + try { + Method gdbmvMethod = DatabaseMetaData.class.getMethod("getDatabaseMajorVersion", null); + return ( (Integer) gdbmvMethod.invoke(meta, null) ).intValue(); + } + catch (NoSuchMethodException nsme) { + return 0; + } + catch (Throwable t) { + log.debug("could not get database version from JDBC metadata"); + return 0; + } + } + + private static String enabledDisabled(boolean value) { + return value ? 
"enabled" : "disabled"; + } + + protected QueryCacheFactory createQueryCacheFactory(Properties properties) { + String queryCacheFactoryClassName = PropertiesHelper.getString( + Environment.QUERY_CACHE_FACTORY, properties, "org.hibernate.cache.StandardQueryCacheFactory" + ); + log.info("Query cache factory: " + queryCacheFactoryClassName); + try { + return (QueryCacheFactory) ReflectHelper.classForName(queryCacheFactoryClassName).newInstance(); + } + catch (Exception cnfe) { + throw new HibernateException("could not instantiate QueryCacheFactory: " + queryCacheFactoryClassName, cnfe); + } + } + + protected CacheProvider createCacheProvider(Properties properties) { + String cacheClassName = PropertiesHelper.getString( + Environment.CACHE_PROVIDER, properties, DEF_CACHE_PROVIDER + ); + log.info("Cache provider: " + cacheClassName); + try { + return (CacheProvider) ReflectHelper.classForName(cacheClassName).newInstance(); + } + catch (Exception cnfe) { + throw new HibernateException("could not instantiate CacheProvider: " + cacheClassName, cnfe); + } + } + + protected QueryTranslatorFactory createQueryTranslatorFactory(Properties properties) { + String className = PropertiesHelper.getString( + Environment.QUERY_TRANSLATOR, properties, "org.hibernate.hql.ast.ASTQueryTranslatorFactory" + ); + log.info("Query translator: " + className); + try { + return (QueryTranslatorFactory) ReflectHelper.classForName(className).newInstance(); + } + catch (Exception cnfe) { + throw new HibernateException("could not instantiate QueryTranslatorFactory: " + className, cnfe); + } + } + + protected BatcherFactory createBatcherFactory(Properties properties, int batchSize) { + String batcherClass = properties.getProperty(Environment.BATCH_STRATEGY); + if (batcherClass==null) { + return batchSize==0 ? + (BatcherFactory) new NonBatchingBatcherFactory() : + (BatcherFactory) new BatchingBatcherFactory(); + } + else { + log.info("Batcher factory: " + batcherClass); + try { + return (BatcherFactory) ReflectHelper.classForName(batcherClass).newInstance(); + } + catch (Exception cnfe) { + throw new HibernateException("could not instantiate BatcherFactory: " + batcherClass, cnfe); + } + } + } + + protected ConnectionProvider createConnectionProvider(Properties properties) { + return ConnectionProviderFactory.newConnectionProvider(properties); + } + + protected TransactionFactory createTransactionFactory(Properties properties) { + return TransactionFactoryFactory.buildTransactionFactory(properties); + } + + protected TransactionManagerLookup createTransactionManagerLookup(Properties properties) { + return TransactionManagerLookupFactory.getTransactionManagerLookup(properties); + } + + private Dialect determineDialect(Properties props, String databaseName, int databaseMajorVersion) { + return DialectFactory.buildDialect( props, databaseName, databaseMajorVersion ); + } + +} diff --git a/src/org/hibernate/cfg/package.html b/src/org/hibernate/cfg/package.html new file mode 100755 index 0000000000..18c3eb8ea6 --- /dev/null +++ b/src/org/hibernate/cfg/package.html @@ -0,0 +1,9 @@ + + + +
+ This package defines APIs for configuring Hibernate, and classes + for building the Hibernate configuration-time metamodel.
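To make the package description concrete, the canonical entry point looks roughly like this (a sketch; the mapping resource name is hypothetical, and Configuration is defined elsewhere in this changeset):

    import org.hibernate.SessionFactory;
    import org.hibernate.cfg.Configuration;

    Configuration cfg = new Configuration()
            .addResource( "org/example/Cat.hbm.xml" )  // hypothetical mapping document
            .setProperty( "hibernate.dialect", "org.hibernate.dialect.HSQLDialect" );
    SessionFactory sessionFactory = cfg.buildSessionFactory();

The Configuration instance is the configuration-time metamodel referred to above; the SessionFactory built from it is immutable at runtime.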
diff --git a/src/org/hibernate/classic/Lifecycle.java b/src/org/hibernate/classic/Lifecycle.java new file mode 100644 index 0000000000..35b4e80399 --- /dev/null +++ b/src/org/hibernate/classic/Lifecycle.java @@ -0,0 +1,94 @@ +//$Id$ +package org.hibernate.classic; + +import java.io.Serializable; + +import org.hibernate.CallbackException; +import org.hibernate.Session; + +/** + * Provides callbacks from the Session to the persistent object. + * Persistent classes may implement this interface but they are not + * required to.<br>
+ * <br>
+ * onSave: called just before the object is saved<br>
+ * onUpdate: called just before an object is updated, + * ie. when Session.update() is called<br>
+ * onDelete: called just before an object is deleted<br>
+ * onLoad: called just after an object is loaded<br>
+ * <br>
+ * onLoad() may be used to initialize transient properties of the + * object from its persistent state. It may not be used to load + * dependent objects since the Session interface may not be + * invoked from inside this method.<br>
+ * <br>
+ * A further intended usage of onLoad(), onSave() and + * onUpdate() is to store a reference to the Session + * for later use.<br>
+ * <br>
+ * If onSave(), onUpdate() or onDelete() return + * VETO, the operation is silently vetoed. If a + * CallbackException is thrown, the operation is vetoed and the + * exception is passed back to the application.<br>
+ * <br>
    + * Note that onSave() is called after an identifier is assigned + * to the object, except when identity column key generation is used. + * + * @see CallbackException + * @author Gavin King + */ +public interface Lifecycle { + + /** + * Return value to veto the action (true) + */ + public static final boolean VETO = true; + + /** + * Return value to accept the action (false) + */ + public static final boolean NO_VETO = false; + + /** + * Called when an entity is saved. + * @param s the session + * @return true to veto save + * @throws CallbackException + */ + public boolean onSave(Session s) throws CallbackException; + + /** + * Called when an entity is passed to Session.update(). + * This method is not called every time the object's + * state is persisted during a flush. + * @param s the session + * @return true to veto update + * @throws CallbackException + */ + public boolean onUpdate(Session s) throws CallbackException; + + /** + * Called when an entity is deleted. + * @param s the session + * @return true to veto delete + * @throws CallbackException + */ + public boolean onDelete(Session s) throws CallbackException; + + /** + * Called after an entity is loaded. It is illegal to + * access the Session from inside this method. + * However, the object may keep a reference to the session + * for later use. + * + * @param s the session + * @param id the identifier + */ + public void onLoad(Session s, Serializable id); +} + + + + + + diff --git a/src/org/hibernate/classic/Session.java b/src/org/hibernate/classic/Session.java new file mode 100755 index 0000000000..416905ee03 --- /dev/null +++ b/src/org/hibernate/classic/Session.java @@ -0,0 +1,352 @@ +//$Id$ +package org.hibernate.classic; + +import java.io.Serializable; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +import org.hibernate.HibernateException; +import org.hibernate.Query; +import org.hibernate.type.Type; + +/** + * An extension of the Session API, including all + * deprecated methods from Hibernate2. This interface is + * provided to allow easier migration of existing applications. + * New code should use org.hibernate.Session. + * @author Gavin King + */ +public interface Session extends org.hibernate.Session { + + /** + * Copy the state of the given object onto the persistent object with the same + * identifier. If there is no persistent instance currently associated with + * the session, it will be loaded. Return the persistent instance. If the + * given instance is unsaved or does not exist in the database, save it and + * return it as a newly persistent instance. Otherwise, the given instance + * does not become associated with the session. + * + * @deprecated use {@link org.hibernate.Session#merge(Object)} + * + * @param object a transient instance with state to be copied + * @return an updated persistent instance + */ + public Object saveOrUpdateCopy(Object object) throws HibernateException; + + /** + * Copy the state of the given object onto the persistent object with the + * given identifier. If there is no persistent instance currently associated + * with the session, it will be loaded. Return the persistent instance. If + * there is no database row with the given identifier, save the given instance + * and return it as a newly persistent instance. Otherwise, the given instance + * does not become associated with the session. 
+ * + * @deprecated with no replacement + * + * @param object a persistent or transient instance with state to be copied + * @param id the identifier of the instance to copy to + * @return an updated persistent instance + */ + public Object saveOrUpdateCopy(Object object, Serializable id) throws HibernateException; + + /** + * Copy the state of the given object onto the persistent object with the same + * identifier. If there is no persistent instance currently associated with + * the session, it will be loaded. Return the persistent instance. If the + * given instance is unsaved or does not exist in the database, save it and + * return it as a newly persistent instance. Otherwise, the given instance + * does not become associated with the session. + * + * @deprecated use {@link org.hibernate.Session#merge(String, Object)} + * + * @param object a transient instance with state to be copied + * @return an updated persistent instance + */ + public Object saveOrUpdateCopy(String entityName, Object object) throws HibernateException; + + /** + * Copy the state of the given object onto the persistent object with the + * given identifier. If there is no persistent instance currently associated + * with the session, it will be loaded. Return the persistent instance. If + * there is no database row with the given identifier, save the given instance + * and return it as a newly persistent instance. Otherwise, the given instance + * does not become associated with the session. + * + * @deprecated with no replacement + * + * @param object a persistent or transient instance with state to be copied + * @param id the identifier of the instance to copy to + * @return an updated persistent instance + */ + public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) throws HibernateException; + + /** + * Execute a query. + * + * @deprecated use {@link #createQuery}.{@link Query#list()} + * + * @param query a query expressed in Hibernate's query language + * @return a distinct list of instances (or arrays of instances) + * @throws HibernateException + */ + public List find(String query) throws HibernateException; + + /** + * Execute a query with bind parameters, binding a value to a "?" parameter + * in the query string. + * + * @deprecated use {@link #createQuery}.setXYZ.{@link Query#list()} + * + * @param query the query string + * @param value a value to be bound to a "?" placeholder (JDBC IN parameter). + * @param type the Hibernate type of the value + * @see org.hibernate.Hibernate for access to Type instances + * @return a distinct list of instances (or arrays of instances) + * @throws HibernateException + */ + public List find(String query, Object value, Type type) throws HibernateException; + + /** + * Execute a query with bind parameters, binding an array of values to "?" + * parameters in the query string. + * + * @deprecated use {@link #createQuery}.setXYZ.{@link Query#list()} + * + * @param query the query string + * @param values an array of values to be bound to the "?" placeholders (JDBC IN parameters). + * @param types an array of Hibernate types of the values + * @see org.hibernate.Hibernate for access to Type instances + * @return a distinct list of instances + * @throws HibernateException + */ + public List find(String query, Object[] values, Type[] types) throws HibernateException; + + /** + * Execute a query and return the results in an iterator. If the query has multiple + * return values, values will be returned in an array of type Object[].
    + *
    + * Entities returned as results are initialized on demand. The first SQL query returns + * identifiers only. So iterate() is usually a less efficient way to retrieve + * objects than find(). + * + * @deprecated use {@link #createQuery}.{@link Query#iterate} + * + * @param query the query string + * @return an iterator + * @throws HibernateException + */ + public Iterator iterate(String query) throws HibernateException; + + /** + * Execute a query and return the results in an iterator. Write the given value to "?" + * in the query string. If the query has multiple return values, values will be returned + * in an array of type Object[].
    + *
    + * Entities returned as results are initialized on demand. The first SQL query returns + * identifiers only. So iterate() is usually a less efficient way to retrieve + * objects than find(). + * + * @deprecated use {@link #createQuery}.setXYZ.{@link Query#iterate} + * + * @param query the query string + * @param value a value to be witten to a "?" placeholder in the query string + * @param type the hibernate type of value + * @return an iterator + * @throws HibernateException + */ + public Iterator iterate(String query, Object value, Type type) throws HibernateException; + + /** + * Execute a query and return the results in an iterator. Write the given values to "?" + * in the query string. If the query has multiple return values, values will be returned + * in an array of type Object[].
    + *
    + * Entities returned as results are initialized on demand. The first SQL query returns + * identifiers only. So iterate() is usually a less efficient way to retrieve + * objects than find(). + * + * @deprecated use {@link #createQuery}.setXYZ.{@link Query#iterate} + * + * @param query the query string + * @param values a list of values to be written to "?" placeholders in the query + * @param types a list of Hibernate types of the values + * @return an iterator + * @throws HibernateException + */ + public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException; + + /** + * Apply a filter to a persistent collection. A filter is a Hibernate query that may refer to + * this, the collection element. Filters allow efficient access to very large lazy + * collections. (Executing the filter does not initialize the collection.) + * + * @deprecated use {@link #createFilter(Object, String)}.{@link Query#list} + * + * @param collection a persistent collection to filter + * @param filter a filter query string + * @return Collection the resulting collection + * @throws HibernateException + */ + public Collection filter(Object collection, String filter) throws HibernateException; + + /** + * Apply a filter to a persistent collection. A filter is a Hibernate query that may refer to + * this, the collection element. + * + * @deprecated use {@link #createFilter(Object, String)}.setXYZ.{@link Query#list} + * + * @param collection a persistent collection to filter + * @param filter a filter query string + * @param value a value to be witten to a "?" placeholder in the query string + * @param type the hibernate type of value + * @return Collection + * @throws HibernateException + */ + public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException; + + /** + * Apply a filter to a persistent collection. + * + * Bind the given parameters to "?" placeholders. A filter is a Hibernate query that + * may refer to this, the collection element. + * + * @deprecated use {@link #createFilter(Object, String)}.setXYZ.{@link Query#list} + * + * @param collection a persistent collection to filter + * @param filter a filter query string + * @param values a list of values to be written to "?" placeholders in the query + * @param types a list of Hibernate types of the values + * @return Collection + * @throws HibernateException + */ + public Collection filter(Object collection, String filter, Object[] values, Type[] types) throws HibernateException; + + /** + * Delete all objects returned by the query. Return the number of objects deleted. + *

    + * Note that this is very different from the delete-statement support added in HQL + * since 3.1. The functionality here is to actually peform the query and then iterate + * the results calling {@link #delete(Object)} individually. + * + * @deprecated consider using HQL delete statements + * + * @param query the query string + * @return the number of instances deleted + * @throws HibernateException + */ + public int delete(String query) throws HibernateException; + + /** + * Delete all objects returned by the query. Return the number of objects deleted. + *

    + * Note that this is very different from the delete-statement support added in HQL + * since 3.1. The functionality here is to actually peform the query and then iterate + * the results calling {@link #delete(Object)} individually. + * + * @deprecated consider using HQL delete statements + * + * @param query the query string + * @param value a value to be witten to a "?" placeholder in the query string. + * @param type the hibernate type of value. + * @return the number of instances deleted + * @throws HibernateException + */ + public int delete(String query, Object value, Type type) throws HibernateException; + + /** + * Delete all objects returned by the query. Return the number of objects deleted. + *

    + * Note that this is very different from the delete-statement support added in HQL + * since 3.1. The functionality here is to actually peform the query and then iterate + * the results calling {@link #delete(Object)} individually. + * + * @deprecated consider using HQL delete statements + * + * @param query the query string + * @param values a list of values to be written to "?" placeholders in the query. + * @param types a list of Hibernate types of the values + * @return the number of instances deleted + * @throws HibernateException + */ + public int delete(String query, Object[] values, Type[] types) throws HibernateException; + + + /** + * Create a new instance of Query for the given SQL string. + * + * @deprecated will be replaced with a more Query like interface in later release + * + * @param sql a query expressed in SQL + * @param returnAlias a table alias that appears inside {} in the SQL string + * @param returnClass the returned persistent class + */ + public Query createSQLQuery(String sql, String returnAlias, Class returnClass); + + /** + * Create a new instance of Query for the given SQL string. + * + * @deprecated will be replaced with a more Query like interface in later release + * + * @param sql a query expressed in SQL + * @param returnAliases an array of table aliases that appear inside {} in the SQL string + * @param returnClasses the returned persistent classes + */ + public Query createSQLQuery(String sql, String[] returnAliases, Class[] returnClasses); + + + /** + * Persist the given transient instance, using the given identifier. This operation + * cascades to associated instances if the association is mapped with + * cascade="save-update". + * + * @deprecated declare identifier properties for all classes + * + * @param object a transient instance of a persistent class + * @param id an unused valid identifier + * @throws HibernateException + */ + public void save(Object object, Serializable id) throws HibernateException; + + /** + * Persist the given transient instance, using the given identifier. This operation + * cascades to associated instances if the association is mapped with + * cascade="save-update". + * + * @deprecated declare identifier properties for all classes + * + * @param object a transient instance of a persistent class + * @param id an unused valid identifier + * @throws HibernateException + */ + public void save(String entityName, Object object, Serializable id) throws HibernateException; + + /** + * Update the persistent state associated with the given identifier. An exception + * is thrown if there is a persistent instance with the same identifier in the + * current session. This operation cascades to associated instances + * if the association is mapped with cascade="save-update". + * + * @deprecated declare identifier properties for all classes + * + * @param object a detached instance containing updated state + * @param id identifier of persistent instance + * @throws HibernateException + */ + public void update(Object object, Serializable id) throws HibernateException; + + /** + * Update the persistent state associated with the given identifier. An exception + * is thrown if there is a persistent instance with the same identifier in the + * current session. This operation cascades to associated instances + * if the association is mapped with cascade="save-update". 
+ * + * @deprecated declare identifier properties for all classes + * + * @param object a detached instance containing updated state + * @param id identifier of persistent instance + * @throws HibernateException + */ + public void update(String entityName, Object object, Serializable id) throws HibernateException; + +} diff --git a/src/org/hibernate/classic/Validatable.java b/src/org/hibernate/classic/Validatable.java new file mode 100644 index 0000000000..7291b0d3bb --- /dev/null +++ b/src/org/hibernate/classic/Validatable.java @@ -0,0 +1,26 @@ +//$Id$ +package org.hibernate.classic; + + +/** + * Implemented by persistent classes with invariants that must + * be checked before inserting into or updating the database. + * + * @author Gavin King + */ +public interface Validatable { + /** + * Validate the state of the object before persisting it. + * If a violation occurs, throw a ValidationFailure. + * This method must not change the state of the object by + * side-effect. + * @throws ValidationFailure if an invariant is violated + */ + public void validate() throws ValidationFailure; +} + + + + + + diff --git a/src/org/hibernate/classic/ValidationFailure.java b/src/org/hibernate/classic/ValidationFailure.java new file mode 100644 index 0000000000..279c6d78f8 --- /dev/null +++ b/src/org/hibernate/classic/ValidationFailure.java @@ -0,0 +1,33 @@ +//$Id$ +package org.hibernate.classic; + +import org.hibernate.HibernateException; + +/** + * Thrown from Validatable.validate() when an invariant + * was violated. Some applications might subclass this exception + * in order to provide more information about the violation. + * + * @author Gavin King + */ +public class ValidationFailure extends HibernateException { + + public ValidationFailure(String message) { + super(message); + } + + public ValidationFailure(String message, Exception e) { + super(message, e); + } + + public ValidationFailure(Exception e) { + super("A validation failure occurred", e); + } + +} + + + + + + diff --git a/src/org/hibernate/classic/package.html b/src/org/hibernate/classic/package.html new file mode 100755 index 0000000000..5fcbac08fc --- /dev/null +++ b/src/org/hibernate/classic/package.html @@ -0,0 +1,9 @@ + + + +
+ This package implements backward-compatibility with Hibernate 2.1 + APIs now deprecated in Hibernate3.
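A sketch of a persistent class that opts into these classic callbacks (the Account class and its fields are invented for illustration; the contracts are the ones documented on Lifecycle and Validatable above):

    import java.io.Serializable;
    import org.hibernate.CallbackException;
    import org.hibernate.Session;
    import org.hibernate.classic.Lifecycle;
    import org.hibernate.classic.Validatable;
    import org.hibernate.classic.ValidationFailure;

    public class Account implements Lifecycle, Validatable {
        private Long id;
        private double balance;

        public boolean onSave(Session s) throws CallbackException {
            return NO_VETO; // let the insert proceed
        }
        public boolean onUpdate(Session s) throws CallbackException {
            return NO_VETO;
        }
        public boolean onDelete(Session s) throws CallbackException {
            return balance != 0 ? VETO : NO_VETO; // silently veto deleting a non-empty account
        }
        public void onLoad(Session s, Serializable id) {
            this.id = (Long) id; // keeping a reference is allowed; calling back into the Session here is not
        }
        public void validate() throws ValidationFailure {
            if ( balance < 0 ) throw new ValidationFailure( "balance may not be negative" );
        }
    }

New Hibernate3 code would typically use an Interceptor or event listeners instead; this package exists to ease migration from Hibernate 2.1.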
    + + diff --git a/src/org/hibernate/collection/AbstractPersistentCollection.java b/src/org/hibernate/collection/AbstractPersistentCollection.java new file mode 100644 index 0000000000..18db168e9f --- /dev/null +++ b/src/org/hibernate/collection/AbstractPersistentCollection.java @@ -0,0 +1,943 @@ +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; + +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.LazyInitializationException; +import org.hibernate.engine.CollectionEntry; +import org.hibernate.engine.ForeignKeys; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.TypedValue; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.Type; +import org.hibernate.util.CollectionHelper; +import org.hibernate.util.EmptyIterator; +import org.hibernate.util.MarkerObject; + +/** + * Base class implementing PersistentCollection + * @see PersistentCollection + * @author Gavin King + */ +public abstract class AbstractPersistentCollection + implements Serializable, PersistentCollection { + + private transient SessionImplementor session; + private boolean initialized; + private transient List operationQueue; + private transient boolean directlyAccessible; + private transient boolean initializing; + private Object owner; + private int cachedSize = -1; + + private String role; + private Serializable key; + // collections detect changes made via their public interface and mark + // themselves as dirty as a performance optimization + private boolean dirty; + private Serializable storedSnapshot; + + public final String getRole() { + return role; + } + + public final Serializable getKey() { + return key; + } + + public final boolean isUnreferenced() { + return role==null; + } + + public final boolean isDirty() { + return dirty; + } + + public final void clearDirty() { + dirty = false; + } + + public final void dirty() { + dirty = true; + } + + public final Serializable getStoredSnapshot() { + return storedSnapshot; + } + + //Careful: these methods do not initialize the collection. + /** + * Is the initialized collection empty? 
+ */ + public abstract boolean empty(); + /** + * Called by any read-only method of the collection interface + */ + protected final void read() { + initialize(false); + } + /** + * Called by the size() method + */ + protected boolean readSize() { + if (!initialized) { + if ( cachedSize!=-1 && !hasQueuedOperations() ) { + return true; + } + else { + throwLazyInitializationExceptionIfNotConnected(); + CollectionEntry entry = session.getPersistenceContext().getCollectionEntry(this); + CollectionPersister persister = entry.getLoadedPersister(); + if ( persister.isExtraLazy() ) { + if ( hasQueuedOperations() ) { + session.flush(); + } + cachedSize = persister.getSize( entry.getLoadedKey(), session ); + return true; + } + } + } + read(); + return false; + } + + protected Boolean readIndexExistence(Object index) { + if (!initialized) { + throwLazyInitializationExceptionIfNotConnected(); + CollectionEntry entry = session.getPersistenceContext().getCollectionEntry(this); + CollectionPersister persister = entry.getLoadedPersister(); + if ( persister.isExtraLazy() ) { + if ( hasQueuedOperations() ) { + session.flush(); + } + return new Boolean( persister.indexExists( entry.getLoadedKey(), index, session ) ); + } + } + read(); + return null; + + } + + protected Boolean readElementExistence(Object element) { + if (!initialized) { + throwLazyInitializationExceptionIfNotConnected(); + CollectionEntry entry = session.getPersistenceContext().getCollectionEntry(this); + CollectionPersister persister = entry.getLoadedPersister(); + if ( persister.isExtraLazy() ) { + if ( hasQueuedOperations() ) { + session.flush(); + } + return new Boolean( persister.elementExists( entry.getLoadedKey(), element, session ) ); + } + } + read(); + return null; + + } + + protected static final Object UNKNOWN = new MarkerObject("UNKNOWN"); + + protected Object readElementByIndex(Object index) { + if (!initialized) { + throwLazyInitializationExceptionIfNotConnected(); + CollectionEntry entry = session.getPersistenceContext().getCollectionEntry(this); + CollectionPersister persister = entry.getLoadedPersister(); + if ( persister.isExtraLazy() ) { + if ( hasQueuedOperations() ) { + session.flush(); + } + return persister.getElementByIndex( entry.getLoadedKey(), index, session, owner ); + } + } + read(); + return UNKNOWN; + + } + + protected int getCachedSize() { + return cachedSize; + } + + /** + * Is the collection currently connected to an open session? + */ + private final boolean isConnectedToSession() { + return session!=null && + session.isOpen() && + session.getPersistenceContext().containsCollection(this); + } + + /** + * Called by any writer method of the collection interface + */ + protected final void write() { + initialize(true); + dirty(); + } + /** + * Is this collection in a state that would allow us to + * "queue" operations? + */ + protected boolean isOperationQueueEnabled() { + return !initialized && + isConnectedToSession() && + isInverseCollection(); + } + /** + * Is this collection in a state that would allow us to + * "queue" puts? This is a special case, because of orphan + * delete. + */ + protected boolean isPutQueueEnabled() { + return !initialized && + isConnectedToSession() && + isInverseOneToManyOrNoOrphanDelete(); + } + /** + * Is this collection in a state that would allow us to + * "queue" clear? This is a special case, because of orphan + * delete. 
+ */ + protected boolean isClearQueueEnabled() { + return !initialized && + isConnectedToSession() && + isInverseCollectionNoOrphanDelete(); + } + + /** + * Is this the "inverse" end of a bidirectional association? + */ + private boolean isInverseCollection() { + CollectionEntry ce = session.getPersistenceContext().getCollectionEntry(this); + return ce != null && ce.getLoadedPersister().isInverse(); + } + + /** + * Is this the "inverse" end of a bidirectional association with + * no orphan delete enabled? + */ + private boolean isInverseCollectionNoOrphanDelete() { + CollectionEntry ce = session.getPersistenceContext().getCollectionEntry(this); + return ce != null && + ce.getLoadedPersister().isInverse() && + !ce.getLoadedPersister().hasOrphanDelete(); + } + + /** + * Is this the "inverse" end of a bidirectional one-to-many, or + * of a collection with no orphan delete? + */ + private boolean isInverseOneToManyOrNoOrphanDelete() { + CollectionEntry ce = session.getPersistenceContext().getCollectionEntry(this); + return ce != null && ce.getLoadedPersister().isInverse() && ( + ce.getLoadedPersister().isOneToMany() || + !ce.getLoadedPersister().hasOrphanDelete() + ); + } + + /** + * Queue an addition + */ + protected final void queueOperation(Object element) { + if (operationQueue==null) operationQueue = new ArrayList(10); + operationQueue.add(element); + dirty = true; //needed so that we remove this collection from the second-level cache + } + + /** + * After reading all existing elements from the database, + * add the queued elements to the underlying collection. + */ + protected final void performQueuedOperations() { + for ( int i=0; i arraySize ) { + for ( int i=arraySize; i= Array.getLength(sn) || Array.get(sn, i)==null ); + } + + public boolean needsUpdating(Object entry, int i, Type elemType) throws HibernateException { + Serializable sn = getSnapshot(); + return iBag. + * Most developers seem to use Lists to represent bag semantics, + * so Hibernate follows this practice. + * + * @author Gavin King + */ +public class PersistentBag extends AbstractPersistentCollection implements List { + + protected List bag; + + public PersistentBag(SessionImplementor session) { + super(session); + } + + public PersistentBag(SessionImplementor session, Collection coll) { + super(session); + if (coll instanceof List) { + bag = (List) coll; + } + else { + bag = new ArrayList(); + Iterator iter = coll.iterator(); + while ( iter.hasNext() ) { + bag.add( iter.next() ); + } + } + setInitialized(); + setDirectlyAccessible(true); + } + + public PersistentBag() {} //needed for SOAP libraries, etc + + public boolean isWrapper(Object collection) { + return bag==collection; + } + public boolean empty() { + return bag.isEmpty(); + } + + public Iterator entries(CollectionPersister persister) { + return bag.iterator(); + } + + public Object readFrom(ResultSet rs, CollectionPersister persister, CollectionAliases descriptor, Object owner) + throws HibernateException, SQLException { + // note that if we load this collection from a cartesian product + // the multiplicity would be broken ... 
so use an idbag instead + Object element = persister.readElement( rs, owner, descriptor.getSuffixedElementAliases(), getSession() ) ; + if (element!=null) bag.add(element); + return element; + } + + public void beforeInitialize(CollectionPersister persister, int anticipatedSize) { + this.bag = ( List ) persister.getCollectionType().instantiate( anticipatedSize ); + } + + public boolean equalsSnapshot(CollectionPersister persister) throws HibernateException { + Type elementType = persister.getElementType(); + EntityMode entityMode = getSession().getEntityMode(); + List sn = (List) getSnapshot(); + if ( sn.size()!=bag.size() ) return false; + Iterator iter = bag.iterator(); + while ( iter.hasNext() ) { + Object elt = iter.next(); + final boolean unequal = countOccurrences(elt, bag, elementType, entityMode) != + countOccurrences(elt, sn, elementType, entityMode); + if ( unequal ) return false; + } + return true; + } + + public boolean isSnapshotEmpty(Serializable snapshot) { + return ( (Collection) snapshot ).isEmpty(); + } + + private int countOccurrences(Object element, List list, Type elementType, EntityMode entityMode) + throws HibernateException { + Iterator iter = list.iterator(); + int result=0; + while ( iter.hasNext() ) { + if ( elementType.isSame( element, iter.next(), entityMode ) ) result++; + } + return result; + } + + public Serializable getSnapshot(CollectionPersister persister) + throws HibernateException { + EntityMode entityMode = getSession().getEntityMode(); + ArrayList clonedList = new ArrayList( bag.size() ); + Iterator iter = bag.iterator(); + while ( iter.hasNext() ) { + clonedList.add( persister.getElementType().deepCopy( iter.next(), entityMode, persister.getFactory() ) ); + } + return clonedList; + } + + public Collection getOrphans(Serializable snapshot, String entityName) throws HibernateException { + List sn = (List) snapshot; + return getOrphans( sn, bag, entityName, getSession() ); + } + + + public Serializable disassemble(CollectionPersister persister) + throws HibernateException { + + int length = bag.size(); + Serializable[] result = new Serializable[length]; + for ( int i=0; i is not really a bag; + // it is *really* a set, since it can't contain the + // same element twice. It could be considered a bug + // in the mapping dtd that allows . + + // Anyway, here we implement semantics for a + // ! + + public Iterator getDeletes(CollectionPersister persister, boolean indexIsFormula) throws HibernateException { + //if ( !persister.isOneToMany() ) throw new AssertionFailure("Not implemented for Bags"); + Type elementType = persister.getElementType(); + EntityMode entityMode = getSession().getEntityMode(); + ArrayList deletes = new ArrayList(); + List sn = (List) getSnapshot(); + Iterator olditer = sn.iterator(); + int i=0; + while ( olditer.hasNext() ) { + Object old = olditer.next(); + Iterator newiter = bag.iterator(); + boolean found = false; + if ( bag.size()>i && elementType.isSame( old, bag.get(i++), entityMode ) ) { + //a shortcut if its location didn't change! 
+ found = true; + } + else { + //search for it + //note that this code is incorrect for other than one-to-many + while ( newiter.hasNext() ) { + if ( elementType.isSame( old, newiter.next(), entityMode ) ) { + found = true; + break; + } + } + } + if (!found) deletes.add(old); + } + return deletes.iterator(); + } + + public boolean needsInserting(Object entry, int i, Type elemType) throws HibernateException { + //if ( !persister.isOneToMany() ) throw new AssertionFailure("Not implemented for Bags"); + List sn = (List) getSnapshot(); + final EntityMode entityMode = getSession().getEntityMode(); + if ( sn.size()>i && elemType.isSame( sn.get(i), entry, entityMode ) ) { + //a shortcut if its location didn't change! + return false; + } + else { + //search for it + //note that this code is incorrect for other than one-to-many + Iterator olditer = sn.iterator(); + while ( olditer.hasNext() ) { + Object old = olditer.next(); + if ( elemType.isSame( old, entry, entityMode ) ) return false; + } + return true; + } + } + + public boolean isRowUpdatePossible() { + return false; + } + + public boolean needsUpdating(Object entry, int i, Type elemType) { + //if ( !persister.isOneToMany() ) throw new AssertionFailure("Not implemented for Bags"); + return false; + } + + /** + * @see java.util.Collection#size() + */ + public int size() { + return readSize() ? getCachedSize() : bag.size(); + } + + /** + * @see java.util.Collection#isEmpty() + */ + public boolean isEmpty() { + return readSize() ? getCachedSize()==0 : bag.isEmpty(); + } + + /** + * @see java.util.Collection#contains(Object) + */ + public boolean contains(Object object) { + Boolean exists = readElementExistence(object); + return exists==null ? + bag.contains(object) : + exists.booleanValue(); + } + + /** + * @see java.util.Collection#iterator() + */ + public Iterator iterator() { + read(); + return new IteratorProxy( bag.iterator() ); + } + + /** + * @see java.util.Collection#toArray() + */ + public Object[] toArray() { + read(); + return bag.toArray(); + } + + /** + * @see java.util.Collection#toArray(Object[]) + */ + public Object[] toArray(Object[] a) { + read(); + return bag.toArray(a); + } + + /** + * @see java.util.Collection#add(Object) + */ + public boolean add(Object object) { + if ( !isOperationQueueEnabled() ) { + write(); + return bag.add(object); + } + else { + queueOperation( new SimpleAdd(object) ); + return true; + } + } + + /** + * @see java.util.Collection#remove(Object) + */ + public boolean remove(Object o) { + initialize( true ); + if ( bag.remove( o ) ) { + dirty(); + return true; + } + else { + return false; + } + } + + /** + * @see java.util.Collection#containsAll(Collection) + */ + public boolean containsAll(Collection c) { + read(); + return bag.containsAll(c); + } + + /** + * @see java.util.Collection#addAll(Collection) + */ + public boolean addAll(Collection values) { + if ( values.size()==0 ) return false; + if ( !isOperationQueueEnabled() ) { + write(); + return bag.addAll(values); + } + else { + Iterator iter = values.iterator(); + while ( iter.hasNext() ) { + queueOperation( new SimpleAdd( iter.next() ) ); + } + return values.size()>0; + } + } + + /** + * @see java.util.Collection#removeAll(Collection) + */ + public boolean removeAll(Collection c) { + if ( c.size()>0 ) { + initialize( true ); + if ( bag.removeAll( c ) ) { + dirty(); + return true; + } + else { + return false; + } + } + else { + return false; + } + } + + /** + * @see java.util.Collection#retainAll(Collection) + */ + public boolean 
retainAll(Collection c) { + initialize( true ); + if ( bag.retainAll( c ) ) { + dirty(); + return true; + } + else { + return false; + } + } + + /** + * @see java.util.Collection#clear() + */ + public void clear() { + if ( isClearQueueEnabled() ) { + queueOperation( new Clear() ); + } + else { + initialize( true ); + if ( ! bag.isEmpty() ) { + bag.clear(); + dirty(); + } + } + } + + public Object getIndex(Object entry, int i, CollectionPersister persister) { + throw new UnsupportedOperationException("Bags don't have indexes"); + } + + public Object getElement(Object entry) { + return entry; + } + + public Object getSnapshotElement(Object entry, int i) { + List sn = (List) getSnapshot(); + return sn.get(i); + } + + public int occurrences(Object o) { + read(); + Iterator iter = bag.iterator(); + int result=0; + while ( iter.hasNext() ) { + if ( o.equals( iter.next() ) ) result++; + } + return result; + } + + // List OPERATIONS: + + /** + * @see java.util.List#add(int, Object) + */ + public void add(int i, Object o) { + write(); + bag.add(i, o); + } + + /** + * @see java.util.List#addAll(int, Collection) + */ + public boolean addAll(int i, Collection c) { + if ( c.size()>0 ) { + write(); + return bag.addAll(i, c); + } + else { + return false; + } + } + + /** + * @see java.util.List#get(int) + */ + public Object get(int i) { + read(); + return bag.get(i); + } + + /** + * @see java.util.List#indexOf(Object) + */ + public int indexOf(Object o) { + read(); + return bag.indexOf(o); + } + + /** + * @see java.util.List#lastIndexOf(Object) + */ + public int lastIndexOf(Object o) { + read(); + return bag.lastIndexOf(o); + } + + /** + * @see java.util.List#listIterator() + */ + public ListIterator listIterator() { + read(); + return new ListIteratorProxy( bag.listIterator() ); + } + + /** + * @see java.util.List#listIterator(int) + */ + public ListIterator listIterator(int i) { + read(); + return new ListIteratorProxy( bag.listIterator(i) ); + } + + /** + * @see java.util.List#remove(int) + */ + public Object remove(int i) { + write(); + return bag.remove(i); + } + + /** + * @see java.util.List#set(int, Object) + */ + public Object set(int i, Object o) { + write(); + return bag.set(i, o); + } + + /** + * @see java.util.List#subList(int, int) + */ + public List subList(int start, int end) { + read(); + return new ListProxy( bag.subList(start, end) ); + } + + public String toString() { + read(); + return bag.toString(); + } + + /*public boolean equals(Object other) { + read(); + return bag.equals(other); + } + + public int hashCode(Object other) { + read(); + return bag.hashCode(); + }*/ + + public boolean entryExists(Object entry, int i) { + return entry!=null; + } + + /** + * Bag does not respect the collection API and do an + * JVM instance comparison to do the equals. + * The semantic is broken not to have to initialize a + * collection for a simple equals() operation. 
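+ * For example (an illustrative sketch; order and its mapped
+ * items property are assumptions):
+ * <pre>
+ * List items = order.getItems();    // a PersistentBag at runtime
+ * items.equals( new ArrayList() );  // false: identity comparison only,
+ *                                   // so the bag is not initialized
+ * </pre>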
+ * @see java.lang.Object#equals(java.lang.Object) + */ + public boolean equals(Object obj) { + return super.equals(obj); + } + + /** + * @see java.lang.Object#hashCode() + */ + public int hashCode() { + return super.hashCode(); + } + + final class Clear implements DelayedOperation { + public void operate() { + bag.clear(); + } + public Object getAddedInstance() { + return null; + } + public Object getOrphan() { + throw new UnsupportedOperationException("queued clear cannot be used with orphan delete"); + } + } + + final class SimpleAdd implements DelayedOperation { + private Object value; + + public SimpleAdd(Object value) { + this.value = value; + } + public void operate() { + bag.add(value); + } + public Object getAddedInstance() { + return value; + } + public Object getOrphan() { + return null; + } + } + +} diff --git a/src/org/hibernate/collection/PersistentCollection.java b/src/org/hibernate/collection/PersistentCollection.java new file mode 100644 index 0000000000..b68f3c255e --- /dev/null +++ b/src/org/hibernate/collection/PersistentCollection.java @@ -0,0 +1,303 @@ +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Collection; +import java.util.Iterator; + +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.loader.CollectionAliases; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.Type; + +/** + * Persistent collections are treated as value objects by Hibernate. + * ie. they have no independent existence beyond the object holding + * a reference to them. Unlike instances of entity classes, they are + * automatically deleted when unreferenced and automatically become + * persistent when held by a persistent object. Collections can be + * passed between different objects (change "roles") and this might + * cause their elements to move from one database table to another.
+ *
    + * Hibernate "wraps" a java collection in an instance of + * PersistentCollection. This mechanism is designed to support + * tracking of changes to the collection's persistent state and + * lazy instantiation of collection elements. The downside is that + * only certain abstract collection types are supported and any + * extra semantics are lost
+ *
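+ * For example, a mapped collection reference is transparently replaced
+ * by one of these wrappers when its owner is loaded (an illustrative
+ * sketch; the Order class and its mapped items property are
+ * assumptions, not part of this API):
+ * <pre>
+ * Order order = (Order) session.load( Order.class, new Long(42) );
+ * List items = order.getItems();  // looks like a plain java.util.List
+ * boolean wrapped = items instanceof PersistentCollection;  // true
+ * </pre>
+ *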
    + * Applications should never use classes in this package + * directly, unless extending the "framework" here.
+ *
    + * Changes to structure of the collection are recorded by the + * collection calling back to the session. Changes to mutable + * elements (ie. composite elements) are discovered by cloning their + * state when the collection is initialized and comparing at flush + * time. + * + * @author Gavin King + */ +public interface PersistentCollection { + + /** + * Get the owning entity. Note that the owner is only + * set during the flush cycle, and when a new collection + * wrapper is created while loading an entity. + */ + public Object getOwner(); + /** + * Set the reference to the owning entity + */ + public void setOwner(Object entity); + + /** + * Is the collection empty? (don't try to initialize the collection) + */ + public boolean empty(); + + /** + * After flushing, re-init snapshot state. + */ + public void setSnapshot(Serializable key, String role, Serializable snapshot); + + /** + * After flushing, clear any "queued" additions, since the + * database state is now synchronized with the memory state. + */ + public void postAction(); + + /** + * return the user-visible collection (or array) instance + */ + public Object getValue(); + + /** + * Called just before reading any rows from the JDBC result set + */ + public void beginRead(); + + /** + * Called after reading all rows from the JDBC result set + */ + public boolean endRead(); + + /** + * Called after initializing from cache + */ + public boolean afterInitialize(); + + /** + * Could the application possibly have a direct reference to + * the underlying collection implementation? + */ + public boolean isDirectlyAccessible(); + + /** + * Disassociate this collection from the given session. + * @return true if this was currently associated with the given session + */ + public boolean unsetSession(SessionImplementor currentSession); + + /** + * Associate the collection with the given session. + * @return false if the collection was already associated with the session + * @throws HibernateException if the collection was already associated + * with another open session + */ + public boolean setCurrentSession(SessionImplementor session) + throws HibernateException; + + /** + * Read the state of the collection from a disassembled cached value + */ + public void initializeFromCache(CollectionPersister persister, + Serializable disassembled, Object owner) throws HibernateException; + + /** + * Iterate all collection entries, during update of the database + */ + public Iterator entries(CollectionPersister persister); + + /** + * Read a row from the JDBC result set + */ + public Object readFrom(ResultSet rs, CollectionPersister role, CollectionAliases descriptor, Object owner) + throws HibernateException, SQLException; + + /** + * Get the index of the given collection entry + */ + public Object getIdentifier(Object entry, int i); + + /** + * Get the index of the given collection entry + * @param persister it was more elegant before we added this... + */ + public Object getIndex(Object entry, int i, CollectionPersister persister); + + /** + * Get the value of the given collection entry + */ + public Object getElement(Object entry); + + /** + * Get the snapshot value of the given collection entry + */ + public Object getSnapshotElement(Object entry, int i); + + /** + * Called before any elements are read into the collection, + * allowing appropriate initializations to occur. + * + * @param persister The underlying collection persister. + * @param anticipatedSize The anticipated size of the collection after initilization is complete. 
+ */ + public void beforeInitialize(CollectionPersister persister, int anticipatedSize); + + /** + * Does the current state exactly match the snapshot? + */ + public boolean equalsSnapshot(CollectionPersister persister) + throws HibernateException; + + /** + * Is the snapshot empty? + */ + public boolean isSnapshotEmpty(Serializable snapshot); + + /** + * Disassemble the collection, ready for the cache + */ + public Serializable disassemble(CollectionPersister persister) + throws HibernateException; + + /** + * Do we need to completely recreate this collection when it changes? + */ + public boolean needsRecreate(CollectionPersister persister); + + /** + * Return a new snapshot of the current state of the collection + */ + public Serializable getSnapshot(CollectionPersister persister) + throws HibernateException; + + /** + * To be called internally by the session, forcing + * immediate initialization. + */ + public void forceInitialization() throws HibernateException; + + /** + * Does an element exist at this entry in the collection? + */ + public boolean entryExists(Object entry, int i); //note that i parameter is now unused (delete it?) + + /** + * Do we need to insert this element? + */ + public boolean needsInserting(Object entry, int i, Type elemType) + throws HibernateException; + + /** + * Do we need to update this element? + */ + public boolean needsUpdating(Object entry, int i, Type elemType) + throws HibernateException; + + public boolean isRowUpdatePossible(); + + /** + * Get all the elements that need deleting + */ + public Iterator getDeletes(CollectionPersister persister, boolean indexIsFormula) + throws HibernateException; + + /** + * Is this the wrapper for the given underlying collection instance? + */ + public boolean isWrapper(Object collection); + + /** + * Is this instance initialized? + */ + public boolean wasInitialized(); + + /** + * Does this instance have any "queued" additions? + */ + public boolean hasQueuedOperations(); + + /** + * Iterate the "queued" additions + */ + public Iterator queuedAdditionIterator(); + + /** + * Get the "queued" orphans + */ + public Collection getQueuedOrphans(String entityName); + + /** + * Get the current collection key value + */ + public Serializable getKey(); + + /** + * Get the current role name + */ + public String getRole(); + + /** + * Is the collection unreferenced? + */ + public boolean isUnreferenced(); + + /** + * Is the collection dirty? Note that this is only + * reliable during the flush cycle, after the + * collection elements are dirty checked against + * the snapshot. + */ + public boolean isDirty(); + + /** + * Clear the dirty flag, after flushing changes + * to the database. 
+ */ + public void clearDirty(); + + /** + * Get the snapshot cached by the collection + * instance + */ + public Serializable getStoredSnapshot(); + + /** + * Mark the collection as dirty + */ + public void dirty(); + + /** + * Called before inserting rows, to ensure that any surrogate keys + * are fully generated + */ + public void preInsert(CollectionPersister persister) + throws HibernateException; + + /** + * Called after inserting a row, to fetch the natively generated id + */ + public void afterRowInsert(CollectionPersister persister, Object entry, int i) + throws HibernateException; + + /** + * get all "orphaned" elements + */ + public Collection getOrphans(Serializable snapshot, String entityName) + throws HibernateException; + +} \ No newline at end of file diff --git a/src/org/hibernate/collection/PersistentElementHolder.java b/src/org/hibernate/collection/PersistentElementHolder.java new file mode 100755 index 0000000000..0ad090e1ea --- /dev/null +++ b/src/org/hibernate/collection/PersistentElementHolder.java @@ -0,0 +1,209 @@ +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +import org.dom4j.Element; +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.loader.CollectionAliases; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.Type; +import org.hibernate.util.CollectionHelper; + +/** + * A persistent wrapper for an XML element + * + * @author Gavin King + */ +public class PersistentElementHolder extends AbstractPersistentCollection { + protected Element element; + + public PersistentElementHolder(SessionImplementor session, Element element) { + super(session); + this.element = element; + setInitialized(); + } + + public Serializable getSnapshot(CollectionPersister persister) + throws HibernateException { + + final Type elementType = persister.getElementType(); + List elements = element.elements( persister.getElementNodeName() ); + ArrayList snapshot = new ArrayList( elements.size() ); + for ( int i=0; i= elements.size() ) { + result.add(old); + } + else { + Element elem = (Element) elements.get(i); + Object object = elementType.fromXMLNode( elem, persister.getFactory() ); + if ( elementType.isDirty( old, object, getSession() ) ) result.add(old); + } + } + return result.iterator(); + + } + + public boolean needsInserting(Object entry, int i, Type elementType) + throws HibernateException { + ArrayList snapshot = (ArrayList) getSnapshot(); + return i>=snapshot.size() || elementType.isDirty( snapshot.get(i), entry, getSession() ); + } + + public boolean needsUpdating(Object entry, int i, Type elementType) + throws HibernateException { + return false; + } + + public Object getIndex(Object entry, int i, CollectionPersister persister) { + throw new UnsupportedOperationException(); + } + + public Object getElement(Object entry) { + return entry; + } + + public Object getSnapshotElement(Object entry, int i) { + throw new UnsupportedOperationException(); + } + + public boolean entryExists(Object entry, int i) { + return entry!=null; + } + +} diff --git a/src/org/hibernate/collection/PersistentIdentifierBag.java b/src/org/hibernate/collection/PersistentIdentifierBag.java new file mode 100644 index 0000000000..dcdc5ad1d8 --- /dev/null +++ 
b/src/org/hibernate/collection/PersistentIdentifierBag.java @@ -0,0 +1,412 @@ +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; + +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.loader.CollectionAliases; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.Type; + +/** + * An IdentifierBag implements "bag" semantics more efficiently than + * a regular Bag by adding a synthetic identifier column to the + * table. This identifier is unique for all rows in the table, allowing very + * efficient updates and deletes. The value of the identifier is never exposed + * to the application.
+ *
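+ * For example (an illustrative sketch; an order with a payments
+ * property mapped as an idbag is assumed):
+ * <pre>
+ * List payments = order.getPayments();  // a PersistentIdentifierBag
+ * payments.remove( payment );           // the deleted row is located by
+ *                                       // its synthetic identifier
+ * </pre>
+ *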
    + * IdentifierBags may not be used for a many-to-one association. + * Furthermore, there is no reason to use inverse="true". + * + * @author Gavin King + */ +public class PersistentIdentifierBag extends AbstractPersistentCollection implements List { + + protected List values; //element + protected Map identifiers; //index -> id + + public PersistentIdentifierBag(SessionImplementor session) { + super(session); + } + + public PersistentIdentifierBag() {} //needed for SOAP libraries, etc + + public PersistentIdentifierBag(SessionImplementor session, Collection coll) { + super(session); + if (coll instanceof List) { + values = (List) coll; + } + else { + values = new ArrayList(); + Iterator iter = coll.iterator(); + while ( iter.hasNext() ) { + values.add( iter.next() ); + } + } + setInitialized(); + setDirectlyAccessible(true); + identifiers = new HashMap(); + } + + public void initializeFromCache(CollectionPersister persister, Serializable disassembled, Object owner) + throws HibernateException { + Serializable[] array = (Serializable[]) disassembled; + int size = array.length; + beforeInitialize( persister, size ); + for ( int i = 0; i < size; i+=2 ) { + identifiers.put( + new Integer(i/2), + persister.getIdentifierType().assemble( array[i], getSession(), owner ) + ); + values.add( persister.getElementType().assemble( array[i+1], getSession(), owner ) ); + } + } + + public Object getIdentifier(Object entry, int i) { + return identifiers.get( new Integer(i) ); + } + + public boolean isWrapper(Object collection) { + return values==collection; + } + + public boolean add(Object o) { + write(); + values.add(o); + return true; + } + + public void clear() { + initialize( true ); + if ( ! values.isEmpty() || ! identifiers.isEmpty() ) { + values.clear(); + identifiers.clear(); + dirty(); + } + } + + public boolean contains(Object o) { + read(); + return values.contains(o); + } + + public boolean containsAll(Collection c) { + read(); + return values.containsAll(c); + } + + public boolean isEmpty() { + return readSize() ? getCachedSize()==0 : values.isEmpty(); + } + + public Iterator iterator() { + read(); + return new IteratorProxy( values.iterator() ); + } + + public boolean remove(Object o) { + initialize( true ); + int index = values.indexOf(o); + if (index>=0) { + beforeRemove(index); + values.remove(index); + dirty(); + return true; + } + else { + return false; + } + } + + public boolean removeAll(Collection c) { + if ( c.size() > 0 ) { + boolean result = false; + Iterator iter = c.iterator(); + while ( iter.hasNext() ) { + if ( remove( iter.next() ) ) result=true; + } + return result; + } + else { + return false; + } + } + + public boolean retainAll(Collection c) { + initialize( true ); + if ( values.retainAll( c ) ) { + dirty(); + return true; + } + else { + return false; + } + } + + public int size() { + return readSize() ? getCachedSize() : values.size(); + } + + public Object[] toArray() { + read(); + return values.toArray(); + } + + public Object[] toArray(Object[] a) { + read(); + return values.toArray(a); + } + + public void beforeInitialize(CollectionPersister persister, int anticipatedSize) { + identifiers = anticipatedSize <= 0 ? new HashMap() : new HashMap( anticipatedSize + 1 + (int)( anticipatedSize * .75f ), .75f ); + values = anticipatedSize <= 0 ? 
new ArrayList() : new ArrayList( anticipatedSize ); + } + + public Serializable disassemble(CollectionPersister persister) + throws HibernateException { + Serializable[] result = new Serializable[ values.size() * 2 ]; + int i=0; + for (int j=0; j< values.size(); j++) { + Object value = values.get(j); + result[i++] = persister.getIdentifierType().disassemble( identifiers.get( new Integer(j) ), getSession(), null ); + result[i++] = persister.getElementType().disassemble( value, getSession(), null ); + } + return result; + } + + public boolean empty() { + return values.isEmpty(); + } + + public Iterator entries(CollectionPersister persister) { + return values.iterator(); + } + + public boolean entryExists(Object entry, int i) { + return entry!=null; + } + + public boolean equalsSnapshot(CollectionPersister persister) throws HibernateException { + Type elementType = persister.getElementType(); + Map snap = (Map) getSnapshot(); + if ( snap.size()!= values.size() ) return false; + for ( int i=0; i 0 ) { + Iterator iter = c.iterator(); + while ( iter.hasNext() ) { + add( index++, iter.next() ); + } + return true; + } + else { + return false; + } + } + + public Object get(int index) { + read(); + return values.get(index); + } + + public int indexOf(Object o) { + read(); + return values.indexOf(o); + } + + public int lastIndexOf(Object o) { + read(); + return values.lastIndexOf(o); + } + + public ListIterator listIterator() { + read(); + return new ListIteratorProxy( values.listIterator() ); + } + + public ListIterator listIterator(int index) { + read(); + return new ListIteratorProxy( values.listIterator(index) ); + } + + private void beforeRemove(int index) { + Object removedId = identifiers.get( new Integer(index) ); + int last = values.size()-1; + for ( int i=index; i 0 ) { + write(); + return values.addAll(c); + } + else { + return false; + } + } + + public void afterRowInsert( + CollectionPersister persister, + Object entry, + int i) + throws HibernateException { + //TODO: if we are using identity columns, fetch the identifier + } + +} diff --git a/src/org/hibernate/collection/PersistentIndexedElementHolder.java b/src/org/hibernate/collection/PersistentIndexedElementHolder.java new file mode 100755 index 0000000000..b475ffb719 --- /dev/null +++ b/src/org/hibernate/collection/PersistentIndexedElementHolder.java @@ -0,0 +1,228 @@ +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.dom4j.Element; +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.loader.CollectionAliases; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.NullableType; +import org.hibernate.type.Type; +import org.hibernate.util.CollectionHelper; + +/** + * A persistent wrapper for an XML element + * + * @author Gavin King + */ +public abstract class PersistentIndexedElementHolder extends AbstractPersistentCollection { + protected Element element; + + public PersistentIndexedElementHolder(SessionImplementor session, Element element) { + super(session); + this.element = element; + setInitialized(); + } + + public static final class IndexedValue { + String index; + Object value; + 
IndexedValue(String index, Object value) { + this.index = index; + this.value = value; + } + } + + protected static String getIndex(Element element, String indexNodeName, int i) { + if (indexNodeName!=null) { + return element.attributeValue(indexNodeName); + } + else { + return Integer.toString(i); + } + } + + protected static void setIndex(Element element, String indexNodeName, String index) { + if (indexNodeName!=null) element.addAttribute(indexNodeName, index); + } + + protected static String getIndexAttributeName(CollectionPersister persister) { + String node = persister.getIndexNodeName(); + return node==null ? null : node.substring(1); + } + + public Serializable getSnapshot(CollectionPersister persister) + throws HibernateException { + + final Type elementType = persister.getElementType(); + String indexNode = getIndexAttributeName(persister); + List elements = element.elements( persister.getElementNodeName() ); + HashMap snapshot = new HashMap( elements.size() ); + for ( int i=0; ijava.util.List. Underlying + * collection is an ArrayList. + * + * @see java.util.ArrayList + * @author Gavin King + */ +public class PersistentList extends AbstractPersistentCollection implements List { + + protected List list; + + public Serializable getSnapshot(CollectionPersister persister) throws HibernateException { + + EntityMode entityMode = getSession().getEntityMode(); + + ArrayList clonedList = new ArrayList( list.size() ); + Iterator iter = list.iterator(); + while ( iter.hasNext() ) { + Object deepCopy = persister.getElementType() + .deepCopy( iter.next(), entityMode, persister.getFactory() ); + clonedList.add( deepCopy ); + } + return clonedList; + } + + public Collection getOrphans(Serializable snapshot, String entityName) throws HibernateException { + List sn = (List) snapshot; + return getOrphans( sn, list, entityName, getSession() ); + } + + public boolean equalsSnapshot(CollectionPersister persister) throws HibernateException { + Type elementType = persister.getElementType(); + List sn = (List) getSnapshot(); + if ( sn.size()!=this.list.size() ) return false; + Iterator iter = list.iterator(); + Iterator sniter = sn.iterator(); + while ( iter.hasNext() ) { + if ( elementType.isDirty( iter.next(), sniter.next(), getSession() ) ) return false; + } + return true; + } + + public boolean isSnapshotEmpty(Serializable snapshot) { + return ( (Collection) snapshot ).isEmpty(); + } + + public PersistentList(SessionImplementor session) { + super(session); + } + + public PersistentList(SessionImplementor session, List list) { + super(session); + this.list = list; + setInitialized(); + setDirectlyAccessible(true); + } + + public void beforeInitialize(CollectionPersister persister, int anticipatedSize) { + this.list = ( List ) persister.getCollectionType().instantiate( anticipatedSize ); + } + + public boolean isWrapper(Object collection) { + return list==collection; + } + + public PersistentList() {} //needed for SOAP libraries, etc + + /** + * @see java.util.List#size() + */ + public int size() { + return readSize() ? getCachedSize() : list.size(); + } + + /** + * @see java.util.List#isEmpty() + */ + public boolean isEmpty() { + return readSize() ? getCachedSize()==0 : list.isEmpty(); + } + + /** + * @see java.util.List#contains(Object) + */ + public boolean contains(Object object) { + Boolean exists = readElementExistence(object); + return exists==null ? 
+ list.contains(object) : + exists.booleanValue(); + } + + /** + * @see java.util.List#iterator() + */ + public Iterator iterator() { + read(); + return new IteratorProxy( list.iterator() ); + } + + /** + * @see java.util.List#toArray() + */ + public Object[] toArray() { + read(); + return list.toArray(); + } + + /** + * @see java.util.List#toArray(Object[]) + */ + public Object[] toArray(Object[] array) { + read(); + return list.toArray(array); + } + + /** + * @see java.util.List#add(Object) + */ + public boolean add(Object object) { + if ( !isOperationQueueEnabled() ) { + write(); + return list.add(object); + } + else { + queueOperation( new SimpleAdd(object) ); + return true; + } + } + + /** + * @see java.util.List#remove(Object) + */ + public boolean remove(Object value) { + Boolean exists = isPutQueueEnabled() ? readElementExistence(value) : null; + if ( exists == null ) { + initialize( true ); + if ( list.remove( value ) ) { + dirty(); + return true; + } + else { + return false; + } + } + else if ( exists.booleanValue() ) { + queueOperation( new SimpleRemove(value) ); + return true; + } + else { + return false; + } + } + + /** + * @see java.util.List#containsAll(Collection) + */ + public boolean containsAll(Collection coll) { + read(); + return list.containsAll(coll); + } + + /** + * @see java.util.List#addAll(Collection) + */ + public boolean addAll(Collection values) { + if ( values.size()==0 ) { + return false; + } + if ( !isOperationQueueEnabled() ) { + write(); + return list.addAll(values); + } + else { + Iterator iter = values.iterator(); + while ( iter.hasNext() ) { + queueOperation( new SimpleAdd( iter.next() ) ); + } + return values.size()>0; + } + } + + /** + * @see java.util.List#addAll(int, Collection) + */ + public boolean addAll(int index, Collection coll) { + if ( coll.size()>0 ) { + write(); + return list.addAll(index, coll); + } + else { + return false; + } + } + + /** + * @see java.util.List#removeAll(Collection) + */ + public boolean removeAll(Collection coll) { + if ( coll.size()>0 ) { + initialize( true ); + if ( list.removeAll( coll ) ) { + dirty(); + return true; + } + else { + return false; + } + } + else { + return false; + } + } + + /** + * @see java.util.List#retainAll(Collection) + */ + public boolean retainAll(Collection coll) { + initialize( true ); + if ( list.retainAll( coll ) ) { + dirty(); + return true; + } + else { + return false; + } + } + + /** + * @see java.util.List#clear() + */ + public void clear() { + if ( isClearQueueEnabled() ) { + queueOperation( new Clear() ); + } + else { + initialize( true ); + if ( ! list.isEmpty() ) { + list.clear(); + dirty(); + } + } + } + + /** + * @see java.util.List#get(int) + */ + public Object get(int index) { + if (index<0) { + throw new ArrayIndexOutOfBoundsException("negative index"); + } + Object result = readElementByIndex( new Integer(index) ); + return result==UNKNOWN ? list.get(index) : result; + } + + /** + * @see java.util.List#set(int, Object) + */ + public Object set(int index, Object value) { + if (index<0) { + throw new ArrayIndexOutOfBoundsException("negative index"); + } + Object old = isPutQueueEnabled() ? 
readElementByIndex( new Integer(index) ) : UNKNOWN; + if ( old==UNKNOWN ) { + write(); + return list.set(index, value); + } + else { + queueOperation( new Set(index, value, old) ); + return old; + } + } + + /** + * @see java.util.List#add(int, Object) + */ + public void add(int index, Object value) { + if (index<0) { + throw new ArrayIndexOutOfBoundsException("negative index"); + } + if ( !isOperationQueueEnabled() ) { + write(); + list.add(index, value); + } + else { + queueOperation( new Add(index, value) ); + } + } + + /** + * @see java.util.List#remove(int) + */ + public Object remove(int index) { + if (index<0) { + throw new ArrayIndexOutOfBoundsException("negative index"); + } + Object old = isPutQueueEnabled() ? + readElementByIndex( new Integer(index) ) : UNKNOWN; + if ( old==UNKNOWN ) { + write(); + return list.remove(index); + } + else { + queueOperation( new Remove(index, old) ); + return old; + } + } + + /** + * @see java.util.List#indexOf(Object) + */ + public int indexOf(Object value) { + read(); + return list.indexOf(value); + } + + /** + * @see java.util.List#lastIndexOf(Object) + */ + public int lastIndexOf(Object value) { + read(); + return list.lastIndexOf(value); + } + + /** + * @see java.util.List#listIterator() + */ + public ListIterator listIterator() { + read(); + return new ListIteratorProxy( list.listIterator() ); + } + + /** + * @see java.util.List#listIterator(int) + */ + public ListIterator listIterator(int index) { + read(); + return new ListIteratorProxy( list.listIterator(index) ); + } + + /** + * @see java.util.List#subList(int, int) + */ + public java.util.List subList(int from, int to) { + read(); + return new ListProxy( list.subList(from, to) ); + } + + public boolean empty() { + return list.isEmpty(); + } + + public String toString() { + read(); + return list.toString(); + } + + public Object readFrom(ResultSet rs, CollectionPersister persister, CollectionAliases descriptor, Object owner) + throws HibernateException, SQLException { + Object element = persister.readElement( rs, owner, descriptor.getSuffixedElementAliases(), getSession() ) ; + int index = ( (Integer) persister.readIndex( rs, descriptor.getSuffixedIndexAliases(), getSession() ) ).intValue(); + + //pad with nulls from the current last element up to the new index + for ( int i = list.size(); i<=index; i++) { + list.add(i, null); + } + + list.set(index, element); + return element; + } + + public Iterator entries(CollectionPersister persister) { + return list.iterator(); + } + + public void initializeFromCache(CollectionPersister persister, Serializable disassembled, Object owner) + throws HibernateException { + Serializable[] array = ( Serializable[] ) disassembled; + int size = array.length; + beforeInitialize( persister, size ); + for ( int i = 0; i < size; i++ ) { + list.add( persister.getElementType().assemble( array[i], getSession(), owner ) ); + } + } + + public Serializable disassemble(CollectionPersister persister) + throws HibernateException { + + int length = list.size(); + Serializable[] result = new Serializable[length]; + for ( int i=0; i list.size() ) { + for ( int i=list.size(); i= sn.size() || sn.get(i)==null ); + } + + public boolean needsUpdating(Object entry, int i, Type elemType) throws HibernateException { + final List sn = (List) getSnapshot(); + return ijava.util.Map. Underlying collection + * is a HashMap. 
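+ * For example (an illustrative sketch; a user with a preferences
+ * property mapped as a map is assumed):
+ * <pre>
+ * Map prefs = user.getPreferences();  // a PersistentMap at runtime
+ * prefs.put( "theme", "dark" );       // initializes the map if required
+ *                                     // and marks the collection dirty
+ * </pre>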
+ * + * @see java.util.HashMap + * @author Gavin King + */ +public class PersistentMap extends AbstractPersistentCollection implements Map { + + protected Map map; + + /** + * Empty constructor. + *
    + * Note: this form is not ever ever ever used by Hibernate; it is, however, + * needed for SOAP libraries and other such marshalling code. + */ + public PersistentMap() { + // intentionally empty + } + + /** + * Instantiates a lazy map (the underlying map is un-initialized). + * + * @param session The session to which this map will belong. + */ + public PersistentMap(SessionImplementor session) { + super(session); + } + + /** + * Instantiates a non-lazy map (the underlying map is constructed + * from the incoming map reference). + * + * @param session The session to which this map will belong. + * @param map The underlying map data. + */ + public PersistentMap(SessionImplementor session, Map map) { + super(session); + this.map = map; + setInitialized(); + setDirectlyAccessible(true); + } + + public Serializable getSnapshot(CollectionPersister persister) throws HibernateException { + EntityMode entityMode = getSession().getEntityMode(); + HashMap clonedMap = new HashMap( map.size() ); + Iterator iter = map.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry e = (Map.Entry) iter.next(); + final Object copy = persister.getElementType() + .deepCopy( e.getValue(), entityMode, persister.getFactory() ); + clonedMap.put( e.getKey(), copy ); + } + return clonedMap; + } + + public Collection getOrphans(Serializable snapshot, String entityName) throws HibernateException { + Map sn = (Map) snapshot; + return getOrphans( sn.values(), map.values(), entityName, getSession() ); + } + + public boolean equalsSnapshot(CollectionPersister persister) throws HibernateException { + Type elementType = persister.getElementType(); + Map xmap = (Map) getSnapshot(); + if ( xmap.size()!=this.map.size() ) return false; + Iterator iter = map.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry entry = (Map.Entry) iter.next(); + if ( elementType.isDirty( entry.getValue(), xmap.get( entry.getKey() ), getSession() ) ) return false; + } + return true; + } + + public boolean isSnapshotEmpty(Serializable snapshot) { + return ( (Map) snapshot ).isEmpty(); + } + + public boolean isWrapper(Object collection) { + return map==collection; + } + + public void beforeInitialize(CollectionPersister persister, int anticipatedSize) { + this.map = ( Map ) persister.getCollectionType().instantiate( anticipatedSize ); + } + + + /** + * @see java.util.Map#size() + */ + public int size() { + return readSize() ? getCachedSize() : map.size(); + } + + /** + * @see java.util.Map#isEmpty() + */ + public boolean isEmpty() { + return readSize() ? getCachedSize()==0 : map.isEmpty(); + } + + /** + * @see java.util.Map#containsKey(Object) + */ + public boolean containsKey(Object key) { + Boolean exists = readIndexExistence(key); + return exists==null ? map.containsKey(key) : exists.booleanValue(); + } + + /** + * @see java.util.Map#containsValue(Object) + */ + public boolean containsValue(Object value) { + Boolean exists = readElementExistence(value); + return exists==null ? + map.containsValue(value) : + exists.booleanValue(); + } + + /** + * @see java.util.Map#get(Object) + */ + public Object get(Object key) { + Object result = readElementByIndex(key); + return result==UNKNOWN ? 
map.get(key) : result; + } + + /** + * @see java.util.Map#put(Object, Object) + */ + public Object put(Object key, Object value) { + if ( isPutQueueEnabled() ) { + Object old = readElementByIndex( key ); + if ( old != UNKNOWN ) { + queueOperation( new Put( key, value, old ) ); + return old; + } + } + initialize( true ); + Object old = map.put( key, value ); + // would be better to use the element-type to determine + // whether the old and the new are equal here; the problem being + // we do not necessarily have access to the element type in all + // cases + if ( value != old ) { + dirty(); + } + return old; + } + + /** + * @see java.util.Map#remove(Object) + */ + public Object remove(Object key) { + if ( isPutQueueEnabled() ) { + Object old = readElementByIndex( key ); + queueOperation( new Remove( key, old ) ); + return old; + } + else { + // TODO : safe to interpret "map.remove(key) == null" as non-dirty? + initialize( true ); + if ( map.containsKey( key ) ) { + dirty(); + } + return map.remove( key ); + } + } + + /** + * @see java.util.Map#putAll(java.util.Map puts) + */ + public void putAll(Map puts) { + if ( puts.size()>0 ) { + initialize( true ); + Iterator itr = puts.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Entry ) itr.next(); + put( entry.getKey(), entry.getValue() ); + } + } + } + + /** + * @see java.util.Map#clear() + */ + public void clear() { + if ( isClearQueueEnabled() ) { + queueOperation( new Clear() ); + } + else { + initialize( true ); + if ( ! map.isEmpty() ) { + dirty(); + map.clear(); + } + } + } + + /** + * @see java.util.Map#keySet() + */ + public Set keySet() { + read(); + return new SetProxy( map.keySet() ); + } + + /** + * @see java.util.Map#values() + */ + public Collection values() { + read(); + return new SetProxy( map.values() ); + } + + /** + * @see java.util.Map#entrySet() + */ + public Set entrySet() { + read(); + return new EntrySetProxy( map.entrySet() ); + } + + public boolean empty() { + return map.isEmpty(); + } + + public String toString() { + read(); + return map.toString(); + } + + public Object readFrom(ResultSet rs, CollectionPersister persister, CollectionAliases descriptor, Object owner) + throws HibernateException, SQLException { + Object element = persister.readElement( rs, owner, descriptor.getSuffixedElementAliases(), getSession() ); + Object index = persister.readIndex( rs, descriptor.getSuffixedIndexAliases(), getSession() ); + if ( element!=null ) map.put(index, element); + return element; + } + + public Iterator entries(CollectionPersister persister) { + return map.entrySet().iterator(); + } + + /** a wrapper for Map.Entry sets */ + class EntrySetProxy implements Set { + private final Set set; + EntrySetProxy(Set set) { + this.set=set; + } + public boolean add(Object entry) { + //write(); -- doesn't + return set.add(entry); + } + public boolean addAll(Collection entries) { + //write(); -- doesn't + return set.addAll(entries); + } + public void clear() { + write(); + set.clear(); + } + public boolean contains(Object entry) { + return set.contains(entry); + } + public boolean containsAll(Collection entries) { + return set.containsAll(entries); + } + public boolean isEmpty() { + return set.isEmpty(); + } + public Iterator iterator() { + return new EntryIteratorProxy( set.iterator() ); + } + public boolean remove(Object entry) { + write(); + return set.remove(entry); + } + public boolean removeAll(Collection entries) { + write(); + return set.removeAll(entries); + } + public boolean retainAll(Collection 
entries) { + write(); + return set.retainAll(entries); + } + public int size() { + return set.size(); + } + // amazingly, these two will work because AbstractCollection + // uses iterator() to fill the array + public Object[] toArray() { + return set.toArray(); + } + public Object[] toArray(Object[] array) { + return set.toArray(array); + } + } + final class EntryIteratorProxy implements Iterator { + private final Iterator iter; + EntryIteratorProxy(Iterator iter) { + this.iter=iter; + } + public boolean hasNext() { + return iter.hasNext(); + } + public Object next() { + return new MapEntryProxy( (Map.Entry) iter.next() ); + } + public void remove() { + write(); + iter.remove(); + } + } + + final class MapEntryProxy implements Map.Entry { + private final Map.Entry me; + MapEntryProxy( Map.Entry me ) { + this.me = me; + } + public Object getKey() { return me.getKey(); } + public Object getValue() { return me.getValue(); } + public boolean equals(Object o) { return me.equals(o); } + public int hashCode() { return me.hashCode(); } + // finally, what it's all about... + public Object setValue(Object value) { + write(); + return me.setValue(value); + } + } + + public void initializeFromCache(CollectionPersister persister, Serializable disassembled, Object owner) + throws HibernateException { + Serializable[] array = ( Serializable[] ) disassembled; + int size = array.length; + beforeInitialize( persister, size ); + for ( int i = 0; i < size; i+=2 ) { + map.put( + persister.getIndexType().assemble( array[i], getSession(), owner ), + persister.getElementType().assemble( array[i+1], getSession(), owner ) + ); + } + } + + public Serializable disassemble(CollectionPersister persister) throws HibernateException { + + Serializable[] result = new Serializable[ map.size() * 2 ]; + Iterator iter = map.entrySet().iterator(); + int i=0; + while ( iter.hasNext() ) { + Map.Entry e = (Map.Entry) iter.next(); + result[i++] = persister.getIndexType().disassemble( e.getKey(), getSession(), null ); + result[i++] = persister.getElementType().disassemble( e.getValue(), getSession(), null ); + } + return result; + + } + + public Iterator getDeletes(CollectionPersister persister, boolean indexIsFormula) + throws HibernateException { + List deletes = new ArrayList(); + Iterator iter = ( (Map) getSnapshot() ).entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry e = (Map.Entry) iter.next(); + Object key = e.getKey(); + if ( e.getValue()!=null && map.get(key)==null ) { + deletes.add( indexIsFormula ? 
e.getValue() : key ); + } + } + return deletes.iterator(); + } + + public boolean needsInserting(Object entry, int i, Type elemType) + throws HibernateException { + final Map sn = (Map) getSnapshot(); + Map.Entry e = (Map.Entry) entry; + return e.getValue()!=null && sn.get( e.getKey() )==null; + } + + public boolean needsUpdating(Object entry, int i, Type elemType) + throws HibernateException { + final Map sn = (Map) getSnapshot(); + Map.Entry e = (Map.Entry) entry; + Object snValue = sn.get( e.getKey() ); + return e.getValue()!=null && + snValue!=null && + elemType.isDirty( snValue, e.getValue(), getSession() ); + } + + + public Object getIndex(Object entry, int i, CollectionPersister persister) { + return ( (Map.Entry) entry ).getKey(); + } + + public Object getElement(Object entry) { + return ( (Map.Entry) entry ).getValue(); + } + + public Object getSnapshotElement(Object entry, int i) { + final Map sn = (Map) getSnapshot(); + return sn.get( ( (Map.Entry) entry ).getKey() ); + } + + public boolean equals(Object other) { + read(); + return map.equals(other); + } + + public int hashCode() { + read(); + return map.hashCode(); + } + + public boolean entryExists(Object entry, int i) { + return ( (Map.Entry) entry ).getValue()!=null; + } + + final class Clear implements DelayedOperation { + public void operate() { + map.clear(); + } + public Object getAddedInstance() { + return null; + } + public Object getOrphan() { + throw new UnsupportedOperationException("queued clear cannot be used with orphan delete"); + } + } + + final class Put implements DelayedOperation { + private Object index; + private Object value; + private Object old; + + public Put(Object index, Object value, Object old) { + this.index = index; + this.value = value; + this.old = old; + } + public void operate() { + map.put(index, value); + } + public Object getAddedInstance() { + return value; + } + public Object getOrphan() { + return old; + } + } + + final class Remove implements DelayedOperation { + private Object index; + private Object old; + + public Remove(Object index, Object old) { + this.index = index; + this.old = old; + } + public void operate() { + map.remove(index); + } + public Object getAddedInstance() { + return null; + } + public Object getOrphan() { + return old; + } + } +} diff --git a/src/org/hibernate/collection/PersistentMapElementHolder.java b/src/org/hibernate/collection/PersistentMapElementHolder.java new file mode 100755 index 0000000000..d24dfbc4e9 --- /dev/null +++ b/src/org/hibernate/collection/PersistentMapElementHolder.java @@ -0,0 +1,71 @@ +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.util.List; + +import org.dom4j.Element; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.NullableType; +import org.hibernate.type.Type; + +/** + * @author Gavin King + */ +public class PersistentMapElementHolder extends PersistentIndexedElementHolder { + + public PersistentMapElementHolder(SessionImplementor session, Element element) { + super( session, element ); + } + + public PersistentMapElementHolder(SessionImplementor session, CollectionPersister persister, + Serializable key) throws HibernateException { + super( session, persister, key ); + } + + public void initializeFromCache(CollectionPersister persister, Serializable disassembled, Object owner) + throws HibernateException { + + Type elementType = persister.getElementType(); + 
Type indexType = persister.getIndexType(); + final String indexNodeName = getIndexAttributeName(persister); + + Serializable[] cached = (Serializable[]) disassembled; + + for ( int i=0; i<cached.length; ) { + Object index = indexType.assemble( cached[i++], getSession(), owner ); + Object object = elementType.assemble( cached[i++], getSession(), owner ); + Element subelement = element.addElement( persister.getElementNodeName() ); + elementType.setToXMLNode( subelement, object, persister.getFactory() ); + String indexString = ( (NullableType) indexType ).toXMLString( index, persister.getFactory() ); + setIndex( subelement, indexNodeName, indexString ); + } + } + +} diff --git a/src/org/hibernate/collection/PersistentSet.java b/src/org/hibernate/collection/PersistentSet.java new file mode 100644 --- /dev/null +++ b/src/org/hibernate/collection/PersistentSet.java +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Set; + +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.loader.CollectionAliases; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.Type; + +/** + * A persistent wrapper for a java.util.Set. The underlying + * collection is a HashSet. + * + * @see java.util.HashSet + * @author Gavin King + */ +public class PersistentSet extends AbstractPersistentCollection implements java.util.Set { + + protected Set set; + protected transient List tempList; + + /** + * Empty constructor. + *

    + * Note: this form is not ever ever ever used by Hibernate; it is, however, + * needed for SOAP libraries and other such marshalling code. + */ + public PersistentSet() { + // intentionally empty + } + + /** + * Constructor matching super. Instantiates a lazy set (the underlying + * set is un-initialized). + * + * @param session The session to which this set will belong. + */ + public PersistentSet(SessionImplementor session) { + super( session ); + } + + /** + * Instantiates a non-lazy set (the underlying set is constructed + * from the incoming set reference). + * + * @param session The session to which this set will belong. + * @param set The underlying set data. + */ + public PersistentSet(SessionImplementor session, java.util.Set set) { + super(session); + // Sets can be just a view of a part of another collection. + // do we need to copy it to be sure it won't be changing + // underneath us? + // ie. this.set.addAll(set); + this.set = set; + setInitialized(); + setDirectlyAccessible(true); + } + + + public Serializable getSnapshot(CollectionPersister persister) + throws HibernateException { + EntityMode entityMode = getSession().getEntityMode(); + + //if (set==null) return new Set(session); + HashMap clonedSet = new HashMap( set.size() ); + Iterator iter = set.iterator(); + while ( iter.hasNext() ) { + Object copied = persister.getElementType() + .deepCopy( iter.next(), entityMode, persister.getFactory() ); + clonedSet.put(copied, copied); + } + return clonedSet; + } + + public Collection getOrphans(Serializable snapshot, String entityName) throws HibernateException { + java.util.Map sn = (java.util.Map) snapshot; + return getOrphans( sn.keySet(), set, entityName, getSession() ); + } + + public boolean equalsSnapshot(CollectionPersister persister) throws HibernateException { + Type elementType = persister.getElementType(); + java.util.Map sn = (java.util.Map) getSnapshot(); + if ( sn.size()!=set.size() ) { + return false; + } + else { + Iterator iter = set.iterator(); + while ( iter.hasNext() ) { + Object test = iter.next(); + Object oldValue = sn.get(test); + if ( oldValue==null || elementType.isDirty( oldValue, test, getSession() ) ) return false; + } + return true; + } + } + + public boolean isSnapshotEmpty(Serializable snapshot) { + return ( (java.util.Map) snapshot ).isEmpty(); + } + + public void beforeInitialize(CollectionPersister persister, int anticipatedSize) { + this.set = ( Set ) persister.getCollectionType().instantiate( anticipatedSize ); + } + + public void initializeFromCache(CollectionPersister persister, Serializable disassembled, Object owner) + throws HibernateException { + Serializable[] array = ( Serializable[] ) disassembled; + int size = array.length; + beforeInitialize( persister, size ); + for (int i = 0; i < size; i++ ) { + Object element = persister.getElementType().assemble( array[i], getSession(), owner ); + if ( element != null ) { + set.add( element ); + } + } + } + + public boolean empty() { + return set.isEmpty(); + } + + /** + * @see java.util.Set#size() + */ + public int size() { + return readSize() ? getCachedSize() : set.size(); + } + + /** + * @see java.util.Set#isEmpty() + */ + public boolean isEmpty() { + return readSize() ? getCachedSize()==0 : set.isEmpty(); + } + + /** + * @see java.util.Set#contains(Object) + */ + public boolean contains(Object object) { + Boolean exists = readElementExistence(object); + return exists==null ? 
+ set.contains(object) : + exists.booleanValue(); + } + + /** + * @see java.util.Set#iterator() + */ + public Iterator iterator() { + read(); + return new IteratorProxy( set.iterator() ); + } + + /** + * @see java.util.Set#toArray() + */ + public Object[] toArray() { + read(); + return set.toArray(); + } + + /** + * @see java.util.Set#toArray(Object[]) + */ + public Object[] toArray(Object[] array) { + read(); + return set.toArray(array); + } + + /** + * @see java.util.Set#add(Object) + */ + public boolean add(Object value) { + Boolean exists = isOperationQueueEnabled() ? readElementExistence( value ) : null; + if ( exists == null ) { + initialize( true ); + if ( set.add( value ) ) { + dirty(); + return true; + } + else { + return false; + } + } + else if ( exists.booleanValue() ) { + return false; + } + else { + queueOperation( new SimpleAdd(value) ); + return true; + } + } + + /** + * @see java.util.Set#remove(Object) + */ + public boolean remove(Object value) { + Boolean exists = isPutQueueEnabled() ? readElementExistence( value ) : null; + if ( exists==null ) { + initialize( true ); + if ( set.remove( value ) ) { + dirty(); + return true; + } + else { + return false; + } + } + else if ( exists.booleanValue() ) { + queueOperation( new SimpleRemove(value) ); + return true; + } + else { + return false; + } + } + + /** + * @see java.util.Set#containsAll(Collection) + */ + public boolean containsAll(Collection coll) { + read(); + return set.containsAll(coll); + } + + /** + * @see java.util.Set#addAll(Collection) + */ + public boolean addAll(Collection coll) { + if ( coll.size() > 0 ) { + initialize( true ); + if ( set.addAll( coll ) ) { + dirty(); + return true; + } + else { + return false; + } + } + else { + return false; + } + } + + /** + * @see java.util.Set#retainAll(Collection) + */ + public boolean retainAll(Collection coll) { + initialize( true ); + if ( set.retainAll( coll ) ) { + dirty(); + return true; + } + else { + return false; + } + } + + /** + * @see java.util.Set#removeAll(Collection) + */ + public boolean removeAll(Collection coll) { + if ( coll.size() > 0 ) { + initialize( true ); + if ( set.removeAll( coll ) ) { + dirty(); + return true; + } + else { + return false; + } + } + else { + return false; + } + } + + /** + * @see java.util.Set#clear() + */ + public void clear() { + if ( isClearQueueEnabled() ) { + queueOperation( new Clear() ); + } + else { + initialize( true ); + if ( !set.isEmpty() ) { + set.clear(); + dirty(); + } + } + } + + public String toString() { + //if (needLoading) return "asleep"; + read(); + return set.toString(); + } + + public Object readFrom( + ResultSet rs, + CollectionPersister persister, + CollectionAliases descriptor, + Object owner) throws HibernateException, SQLException { + Object element = persister.readElement( rs, owner, descriptor.getSuffixedElementAliases(), getSession() ); + if (element!=null) tempList.add(element); + return element; + } + + public void beginRead() { + super.beginRead(); + tempList = new ArrayList(); + } + + public boolean endRead() { + set.addAll(tempList); + tempList = null; + setInitialized(); + return true; + } + + public Iterator entries(CollectionPersister persister) { + return set.iterator(); + } + + public Serializable disassemble(CollectionPersister persister) + throws HibernateException { + + Serializable[] result = new Serializable[ set.size() ]; + Iterator iter = set.iterator(); + int i=0; + while ( iter.hasNext() ) { + result[i++] = persister.getElementType().disassemble( iter.next(), getSession(), null 
); + } + return result; + + } + + public Iterator getDeletes(CollectionPersister persister, boolean indexIsFormula) throws HibernateException { + Type elementType = persister.getElementType(); + final java.util.Map sn = (java.util.Map) getSnapshot(); + ArrayList deletes = new ArrayList( sn.size() ); + Iterator iter = sn.keySet().iterator(); + while ( iter.hasNext() ) { + Object test = iter.next(); + if ( !set.contains(test) ) { + // the element has been removed from the set + deletes.add(test); + } + } + iter = set.iterator(); + while ( iter.hasNext() ) { + Object test = iter.next(); + Object oldValue = sn.get(test); + if ( oldValue!=null && elementType.isDirty( test, oldValue, getSession() ) ) { + // the element has changed + deletes.add(oldValue); + } + } + return deletes.iterator(); + } + + public boolean needsInserting(Object entry, int i, Type elemType) throws HibernateException { + final java.util.Map sn = (java.util.Map) getSnapshot(); + Object oldValue = sn.get(entry); + // note that it might be better to iterate the snapshot but this is safe, + // assuming the user implements equals() properly, as required by the Set + // contract! + return oldValue==null || elemType.isDirty( oldValue, entry, getSession() ); + } + + public boolean needsUpdating(Object entry, int i, Type elemType) { + return false; + } + + public boolean isRowUpdatePossible() { + return false; + } + + public Object getIndex(Object entry, int i, CollectionPersister persister) { + throw new UnsupportedOperationException("Sets don't have indexes"); + } + + public Object getElement(Object entry) { + return entry; + } + + public Object getSnapshotElement(Object entry, int i) { + throw new UnsupportedOperationException("Sets don't support updating by element"); + } + + public boolean equals(Object other) { + read(); + return set.equals(other); + } + + public int hashCode() { + read(); + return set.hashCode(); + } + + public boolean entryExists(Object key, int i) { + return true; + } + + public boolean isWrapper(Object collection) { + return set==collection; + } + + final class Clear implements DelayedOperation { + public void operate() { + set.clear(); + } + public Object getAddedInstance() { + return null; + } + public Object getOrphan() { + throw new UnsupportedOperationException("queued clear cannot be used with orphan delete"); + } + } + + final class SimpleAdd implements DelayedOperation { + private Object value; + + public SimpleAdd(Object value) { + this.value = value; + } + public void operate() { + set.add(value); + } + public Object getAddedInstance() { + return value; + } + public Object getOrphan() { + return null; + } + } + + final class SimpleRemove implements DelayedOperation { + private Object value; + + public SimpleRemove(Object value) { + this.value = value; + } + public void operate() { + set.remove(value); + } + public Object getAddedInstance() { + return null; + } + public Object getOrphan() { + return value; + } + } +} diff --git a/src/org/hibernate/collection/PersistentSortedMap.java b/src/org/hibernate/collection/PersistentSortedMap.java new file mode 100644 index 0000000000..a851eff13d --- /dev/null +++ b/src/org/hibernate/collection/PersistentSortedMap.java @@ -0,0 +1,191 @@ +//$Id$ +package org.hibernate.collection; + + +import java.io.Serializable; +import java.util.Collection; +import java.util.Comparator; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; + +import org.hibernate.EntityMode; +import 
org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.BasicCollectionPersister; + + +/** + * A persistent wrapper for a java.util.SortedMap. Underlying + * collection is a TreeMap. + * + * @see java.util.TreeMap + * @author e + */ +public class PersistentSortedMap extends PersistentMap implements SortedMap { + + protected Comparator comparator; + + protected Serializable snapshot(BasicCollectionPersister persister, EntityMode entityMode) throws HibernateException { + TreeMap clonedMap = new TreeMap(comparator); + Iterator iter = map.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry e = (Map.Entry) iter.next(); + clonedMap.put( e.getKey(), persister.getElementType().deepCopy( e.getValue(), entityMode, persister.getFactory() ) ); + } + return clonedMap; + } + + public PersistentSortedMap(SessionImplementor session) { + super(session); + } + + public void setComparator(Comparator comparator) { + this.comparator = comparator; + } + + public PersistentSortedMap(SessionImplementor session, SortedMap map) { + super(session, map); + comparator = map.comparator(); + } + + public PersistentSortedMap() {} //needed for SOAP libraries, etc + + /** + * @see PersistentSortedMap#comparator() + */ + public Comparator comparator() { + return comparator; + } + + /** + * @see PersistentSortedMap#subMap(Object, Object) + */ + public SortedMap subMap(Object fromKey, Object toKey) { + read(); + SortedMap m = ( (SortedMap) map ).subMap(fromKey, toKey); + return new SortedSubMap(m); + } + + /** + * @see PersistentSortedMap#headMap(Object) + */ + public SortedMap headMap(Object toKey) { + read(); + SortedMap m; + m = ( (SortedMap) map ).headMap(toKey); + return new SortedSubMap(m); + } + + /** + * @see PersistentSortedMap#tailMap(Object) + */ + public SortedMap tailMap(Object fromKey) { + read(); + SortedMap m; + m = ( (SortedMap) map ).tailMap(fromKey); + return new SortedSubMap(m); + } + + /** + * @see PersistentSortedMap#firstKey() + */ + public Object firstKey() { + read(); + return ( (SortedMap) map ).firstKey(); + } + + /** + * @see PersistentSortedMap#lastKey() + */ + public Object lastKey() { + read(); + return ( (SortedMap) map ).lastKey(); + } + + class SortedSubMap implements SortedMap { + + SortedMap submap; + + SortedSubMap(SortedMap m) { + this.submap = m; + } + // from Map + public int size() { + return submap.size(); + } + public boolean isEmpty() { + return submap.isEmpty(); + } + public boolean containsKey(Object key) { + return submap.containsKey(key); + } + public boolean containsValue(Object key) { + return submap.containsValue(key) ; + } + public Object get(Object key) { + return submap.get(key); + } + public Object put(Object key, Object value) { + write(); + return submap.put(key, value); + } + public Object remove(Object key) { + write(); + return submap.remove(key); + } + public void putAll(Map other) { + write(); + submap.putAll(other); + } + public void clear() { + write(); + submap.clear(); + } + public Set keySet() { + return new SetProxy( submap.keySet() ); + } + public Collection values() { + return new SetProxy( submap.values() ); + } + public Set entrySet() { + return new EntrySetProxy( submap.entrySet() ); + } + // from SortedMap + public Comparator comparator() { + return submap.comparator(); + } + public SortedMap subMap(Object fromKey, Object toKey) { + SortedMap m; + m = submap.subMap(fromKey, toKey); + return new SortedSubMap( m ); + } + public SortedMap headMap(Object toKey) { + SortedMap 
m; + m = submap.headMap(toKey); + return new SortedSubMap(m); + } + public SortedMap tailMap(Object fromKey) { + SortedMap m; + m = submap.tailMap(fromKey); + return new SortedSubMap(m); + } + public Object firstKey() { + return submap.firstKey(); + } + public Object lastKey() { + return submap.lastKey(); + } + + } + +} + + + + + + + diff --git a/src/org/hibernate/collection/PersistentSortedSet.java b/src/org/hibernate/collection/PersistentSortedSet.java new file mode 100644 index 0000000000..d354e592fa --- /dev/null +++ b/src/org/hibernate/collection/PersistentSortedSet.java @@ -0,0 +1,145 @@ +//$Id$ +package org.hibernate.collection; + +import java.io.Serializable; +import java.util.Comparator; +import java.util.Iterator; +import java.util.SortedSet; +import java.util.TreeMap; + +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.collection.BasicCollectionPersister; + + +/** + * A persistent wrapper for a java.util.SortedSet. Underlying + * collection is a TreeSet. + * + * @see java.util.TreeSet + * @author e + */ +public class PersistentSortedSet extends PersistentSet implements SortedSet { + + protected Comparator comparator; + + protected Serializable snapshot(BasicCollectionPersister persister, EntityMode entityMode) + throws HibernateException { + //if (set==null) return new Set(session); + TreeMap clonedSet = new TreeMap(comparator); + Iterator iter = set.iterator(); + while ( iter.hasNext() ) { + Object copy = persister.getElementType().deepCopy( iter.next(), entityMode, persister.getFactory() ); + clonedSet.put(copy, copy); + } + return clonedSet; + } + + public void setComparator(Comparator comparator) { + this.comparator = comparator; + } + + public PersistentSortedSet(SessionImplementor session) { + super(session); + } + + public PersistentSortedSet(SessionImplementor session, SortedSet set) { + super(session, set); + comparator = set.comparator(); + } + + public PersistentSortedSet() {} //needed for SOAP libraries, etc + + /** + * @see PersistentSortedSet#comparator() + */ + public Comparator comparator() { + return comparator; + } + + /** + * @see PersistentSortedSet#subSet(Object,Object) + */ + public SortedSet subSet(Object fromElement, Object toElement) { + read(); + SortedSet s; + s = ( (SortedSet) set ).subSet(fromElement, toElement); + return new SubSetProxy(s); + } + + /** + * @see PersistentSortedSet#headSet(Object) + */ + public SortedSet headSet(Object toElement) { + read(); + SortedSet s = ( (SortedSet) set ).headSet(toElement); + return new SubSetProxy(s); + } + + /** + * @see PersistentSortedSet#tailSet(Object) + */ + public SortedSet tailSet(Object fromElement) { + read(); + SortedSet s = ( (SortedSet) set ).tailSet(fromElement); + return new SubSetProxy(s); + } + + /** + * @see PersistentSortedSet#first() + */ + public Object first() { + read(); + return ( (SortedSet) set ).first(); + } + + /** + * @see PersistentSortedSet#last() + */ + public Object last() { + read(); + return ( (SortedSet) set ).last(); + } + + /** wrapper for subSets to propagate write to its backing set */ + class SubSetProxy extends SetProxy implements SortedSet { + + SubSetProxy(SortedSet s) { + super(s); + } + + public Comparator comparator() { + return ( (SortedSet) this.set ).comparator(); + } + + public Object first() { + return ( (SortedSet) this.set ).first(); + } + + public SortedSet headSet(Object toValue) { + return new SubSetProxy( ( (SortedSet) this.set ).headSet(toValue) 
); + } + + public Object last() { + return ( (SortedSet) this.set ).last(); + } + + public SortedSet subSet(Object fromValue, Object toValue) { + return new SubSetProxy( ( (SortedSet) this.set ).subSet(fromValue, toValue) ); + } + + public SortedSet tailSet(Object fromValue) { + return new SubSetProxy( ( (SortedSet) this.set ).tailSet(fromValue) ); + } + + } + +} + + + + + + + diff --git a/src/org/hibernate/collection/package.html b/src/org/hibernate/collection/package.html new file mode 100755 index 0000000000..c9ec7f7dc8 --- /dev/null +++ b/src/org/hibernate/collection/package.html @@ -0,0 +1,8 @@ + + + +

    + This package defines a framework for collection wrappers. +
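The wrappers in this package derive the SQL delta by diffing the live collection against a snapshot taken by the session (see PersistentMap.getDeletes() and needsInserting() above). The following standalone sketch is illustrative only: it uses plain key lookups where Hibernate would use Type.isDirty(), and the class and variable names are not part of this patch.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map;

    public class SnapshotDiffSketch {
        public static void main(String[] args) {
            Map snapshot = new HashMap();   // state captured at the last flush
            snapshot.put( "a", "1" );
            snapshot.put( "b", "2" );

            Map current = new HashMap();    // live, possibly modified map
            current.put( "b", "2" );
            current.put( "c", "3" );

            // keys present in the snapshot but gone (or nulled) in the live map
            List deletes = new ArrayList();
            for ( Iterator it = snapshot.entrySet().iterator(); it.hasNext(); ) {
                Map.Entry e = (Map.Entry) it.next();
                if ( e.getValue() != null && current.get( e.getKey() ) == null ) {
                    deletes.add( e.getKey() );
                }
            }

            // non-null live entries with no counterpart in the snapshot
            List inserts = new ArrayList();
            for ( Iterator it = current.entrySet().iterator(); it.hasNext(); ) {
                Map.Entry e = (Map.Entry) it.next();
                if ( e.getValue() != null && snapshot.get( e.getKey() ) == null ) {
                    inserts.add( e.getKey() );
                }
            }

            System.out.println( "deletes: " + deletes + ", inserts: " + inserts ); // deletes: [a], inserts: [c]
        }
    }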

    + + diff --git a/src/org/hibernate/connection/C3P0ConnectionProvider.java b/src/org/hibernate/connection/C3P0ConnectionProvider.java new file mode 100644 index 0000000000..dab2c30118 --- /dev/null +++ b/src/org/hibernate/connection/C3P0ConnectionProvider.java @@ -0,0 +1,218 @@ +//$Id$ +package org.hibernate.connection; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.Properties; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import com.mchange.v2.c3p0.DataSources; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.util.PropertiesHelper; +import org.hibernate.util.ReflectHelper; + +/** + * A connection provider that uses a C3P0 connection pool. Hibernate will use this by + * default if the hibernate.c3p0.* properties are set. + * + * @author various people + * @see ConnectionProvider + */ +public class C3P0ConnectionProvider implements ConnectionProvider { + + private static final Log log = LogFactory.getLog( C3P0ConnectionProvider.class ); + + //swaldman 2006-08-28: define c3p0-style configuration parameters for properties with + // hibernate-specific overrides to detect and warn about conflicting + // declarations + private final static String C3P0_STYLE_MIN_POOL_SIZE = "c3p0.minPoolSize"; + private final static String C3P0_STYLE_MAX_POOL_SIZE = "c3p0.maxPoolSize"; + private final static String C3P0_STYLE_MAX_IDLE_TIME = "c3p0.maxIdleTime"; + private final static String C3P0_STYLE_MAX_STATEMENTS = "c3p0.maxStatements"; + private final static String C3P0_STYLE_ACQUIRE_INCREMENT = "c3p0.acquireIncrement"; + private final static String C3P0_STYLE_IDLE_CONNECTION_TEST_PERIOD = "c3p0.idleConnectionTestPeriod"; + private final static String C3P0_STYLE_TEST_CONNECTION_ON_CHECKOUT = "c3p0.testConnectionOnCheckout"; + + //swaldman 2006-08-28: define c3p0-style configuration parameters for initialPoolSize, which + // hibernate sensibly lets default to minPoolSize, but we'll let users + // override it with the c3p0-style property if they want. 
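// For example (key names illustrative), the configure() method below performs:
//   hibernate.c3p0.min_size=5               ->  c3p0.minPoolSize=5           (via setOverwriteProperty, with a
//                                                                             warning if both spellings are set)
//   hibernate.c3p0.acquireRetryAttempts=3   ->  c3p0.acquireRetryAttempts=3  (generic hibernate.c3p0.* pass-through)
// and c3p0.initialPoolSize falls back to minPoolSize unless given explicitly.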
+ private final static String C3P0_STYLE_INITIAL_POOL_SIZE = "c3p0.initialPoolSize"; + + private DataSource ds; + private Integer isolation; + private boolean autocommit; + + /** + * {@inheritDoc} + */ + public Connection getConnection() throws SQLException { + final Connection c = ds.getConnection(); + if ( isolation != null ) { + c.setTransactionIsolation( isolation.intValue() ); + } + if ( c.getAutoCommit() != autocommit ) { + c.setAutoCommit( autocommit ); + } + return c; + } + + /** + * {@inheritDoc} + */ + public void closeConnection(Connection conn) throws SQLException { + conn.close(); + } + + /** + * {@inheritDoc} + */ + public void configure(Properties props) throws HibernateException { + String jdbcDriverClass = props.getProperty( Environment.DRIVER ); + String jdbcUrl = props.getProperty( Environment.URL ); + Properties connectionProps = ConnectionProviderFactory.getConnectionProperties( props ); + + log.info( "C3P0 using driver: " + jdbcDriverClass + " at URL: " + jdbcUrl ); + log.info( "Connection properties: " + PropertiesHelper.maskOut( connectionProps, "password" ) ); + + autocommit = PropertiesHelper.getBoolean( Environment.AUTOCOMMIT, props ); + log.info( "autocommit mode: " + autocommit ); + + if ( jdbcDriverClass == null ) { + log.warn( "No JDBC Driver class was specified by property " + Environment.DRIVER ); + } + else { + try { + Class.forName( jdbcDriverClass ); + } + catch ( ClassNotFoundException cnfe ) { + try { + ReflectHelper.classForName( jdbcDriverClass ); + } + catch ( ClassNotFoundException e ) { + String msg = "JDBC Driver class not found: " + jdbcDriverClass; + log.fatal( msg, e ); + throw new HibernateException( msg, e ); + } + } + } + + try { + + //swaldman 2004-02-07: modify to allow null values to signify fall through to c3p0 PoolConfig defaults + Integer minPoolSize = PropertiesHelper.getInteger( Environment.C3P0_MIN_SIZE, props ); + Integer maxPoolSize = PropertiesHelper.getInteger( Environment.C3P0_MAX_SIZE, props ); + Integer maxIdleTime = PropertiesHelper.getInteger( Environment.C3P0_TIMEOUT, props ); + Integer maxStatements = PropertiesHelper.getInteger( Environment.C3P0_MAX_STATEMENTS, props ); + Integer acquireIncrement = PropertiesHelper.getInteger( Environment.C3P0_ACQUIRE_INCREMENT, props ); + Integer idleTestPeriod = PropertiesHelper.getInteger( Environment.C3P0_IDLE_TEST_PERIOD, props ); + + Properties c3props = new Properties(); + + // turn hibernate.c3p0.* into c3p0.*, so c3p0 + // gets a chance to see all hibernate.c3p0.* + for ( Iterator ii = props.keySet().iterator(); ii.hasNext(); ) { + String key = ( String ) ii.next(); + if ( key.startsWith( "hibernate.c3p0." 
) ) { + String newKey = key.substring( 10 ); + if ( props.containsKey( newKey ) ) { + warnPropertyConflict( key, newKey ); + } + c3props.put( newKey, props.get( key ) ); + } + } + + setOverwriteProperty( Environment.C3P0_MIN_SIZE, C3P0_STYLE_MIN_POOL_SIZE, props, c3props, minPoolSize ); + setOverwriteProperty( Environment.C3P0_MAX_SIZE, C3P0_STYLE_MAX_POOL_SIZE, props, c3props, maxPoolSize ); + setOverwriteProperty( Environment.C3P0_TIMEOUT, C3P0_STYLE_MAX_IDLE_TIME, props, c3props, maxIdleTime ); + setOverwriteProperty( + Environment.C3P0_MAX_STATEMENTS, C3P0_STYLE_MAX_STATEMENTS, props, c3props, maxStatements + ); + setOverwriteProperty( + Environment.C3P0_ACQUIRE_INCREMENT, C3P0_STYLE_ACQUIRE_INCREMENT, props, c3props, acquireIncrement + ); + setOverwriteProperty( + Environment.C3P0_IDLE_TEST_PERIOD, C3P0_STYLE_IDLE_CONNECTION_TEST_PERIOD, props, c3props, idleTestPeriod + ); + + // revert to traditional hibernate behavior of setting initialPoolSize to minPoolSize + // unless otherwise specified with a c3p0.*-style parameter. + Integer initialPoolSize = PropertiesHelper.getInteger( C3P0_STYLE_INITIAL_POOL_SIZE, props ); + if ( initialPoolSize == null && minPoolSize != null ) { + c3props.put( C3P0_STYLE_INITIAL_POOL_SIZE, String.valueOf( minPoolSize ).trim() ); + } + + /*DataSource unpooled = DataSources.unpooledDataSource( + jdbcUrl, props.getProperty(Environment.USER), props.getProperty(Environment.PASS) + );*/ + DataSource unpooled = DataSources.unpooledDataSource( jdbcUrl, connectionProps ); + + Properties allProps = ( Properties ) props.clone(); + allProps.putAll( c3props ); + + ds = DataSources.pooledDataSource( unpooled, allProps ); + } + catch ( Exception e ) { + log.fatal( "could not instantiate C3P0 connection pool", e ); + throw new HibernateException( "Could not instantiate C3P0 connection pool", e ); + } + + String i = props.getProperty( Environment.ISOLATION ); + if ( i == null ) { + isolation = null; + } + else { + isolation = new Integer( i ); + log.info( "JDBC isolation level: " + Environment.isolationLevelToString( isolation.intValue() ) ); + } + + } + + /** + * {@inheritDoc} + */ + public void close() { + try { + DataSources.destroy( ds ); + } + catch ( SQLException sqle ) { + log.warn( "could not destroy C3P0 connection pool", sqle ); + } + } + + /** + * {@inheritDoc} + */ + public boolean supportsAggressiveRelease() { + return false; + } + + private void setOverwriteProperty(String hibernateStyleKey, String c3p0StyleKey, Properties hibp, Properties c3p, Integer value) { + if ( value != null ) { + c3p.put( c3p0StyleKey, String.valueOf( value ).trim() ); + if ( hibp.getProperty( c3p0StyleKey ) != null ) { + warnPropertyConflict( hibernateStyleKey, c3p0StyleKey ); + } + String longC3p0StyleKey = "hibernate." + c3p0StyleKey; + if ( hibp.getProperty( longC3p0StyleKey ) != null ) { + warnPropertyConflict( hibernateStyleKey, longC3p0StyleKey ); + } + } + } + + private void warnPropertyConflict(String hibernateStyle, String c3p0Style) { + log.warn( + "Both hibernate-style property '" + hibernateStyle + + "' and c3p0-style property '" + c3p0Style + + "' have been set in hibernate.properties. " + + "Hibernate-style property '" + hibernateStyle + "' will be used " + + "and c3p0-style property '" + c3p0Style + "' will be ignored!" 
+ ); + } +} diff --git a/src/org/hibernate/connection/ConnectionProvider.java b/src/org/hibernate/connection/ConnectionProvider.java new file mode 100644 index 0000000000..f84b0b548d --- /dev/null +++ b/src/org/hibernate/connection/ConnectionProvider.java @@ -0,0 +1,71 @@ +//$Id$ +package org.hibernate.connection; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; + +import org.hibernate.HibernateException; + +/** + * A strategy for obtaining JDBC connections. + *

    + * Implementors might also implement connection pooling.
    + *
    + * The ConnectionProvider interface is not intended to be + * exposed to the application. Instead it is used internally by + * Hibernate to obtain connections.
    + *
+ * Implementors should provide a public default constructor. + * + * @see ConnectionProviderFactory + * @author Gavin King + */ +public interface ConnectionProvider { + /** + * Initialize the connection provider from given properties. + * @param props SessionFactory properties + */ + public void configure(Properties props) throws HibernateException; + /** + * Grab a connection, with the autocommit mode specified by + * hibernate.connection.autocommit. + * @return a JDBC connection + * @throws SQLException + */ + public Connection getConnection() throws SQLException; + /** + * Dispose of a used connection. + * @param conn a JDBC connection + * @throws SQLException + */ + public void closeConnection(Connection conn) throws SQLException; + + /** + * Release all resources held by this provider. JavaDoc requires a second sentence. + * @throws HibernateException + */ + public void close() throws HibernateException; + + /** + * Does this connection provider support aggressive release of JDBC + * connections and re-acquisition of those connections (if need be) later? + *

+ * This is used in conjunction with {@link org.hibernate.cfg.Environment#RELEASE_CONNECTIONS} + * to aggressively release JDBC connections. However, the configured ConnectionProvider + * must support re-acquisition of the same underlying connection for that semantic to work. + *

    + * Typically, this is only true in managed environments where a container + * tracks connections by transaction or thread. + * + * Note that JTA semantic depends on the fact that the underlying connection provider does + * support aggressive release. + */ + public boolean supportsAggressiveRelease(); +} + + + + + + + diff --git a/src/org/hibernate/connection/ConnectionProviderFactory.java b/src/org/hibernate/connection/ConnectionProviderFactory.java new file mode 100644 index 0000000000..4cb8ea81a6 --- /dev/null +++ b/src/org/hibernate/connection/ConnectionProviderFactory.java @@ -0,0 +1,174 @@ +//$Id$ +package org.hibernate.connection; + +import java.util.HashSet; +import java.util.Iterator; +import java.util.Properties; +import java.util.Set; +import java.util.Map; +import java.beans.Introspector; +import java.beans.BeanInfo; +import java.beans.IntrospectionException; +import java.beans.PropertyDescriptor; +import java.lang.reflect.Method; +import java.lang.reflect.InvocationTargetException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.util.ReflectHelper; + +/** + * Instantiates a connection provider given either System properties or + * a java.util.Properties instance. The ConnectionProviderFactory + * first attempts to find a name of a ConnectionProvider subclass in the + * property hibernate.connection.provider_class. If missing, heuristics are used + * to choose either DriverManagerConnectionProvider, + * DatasourceConnectionProvider, C3P0ConnectionProvider or + * DBCPConnectionProvider. + * @see ConnectionProvider + * @author Gavin King + */ + +public final class ConnectionProviderFactory { + + private static final Log log = LogFactory.getLog(ConnectionProviderFactory.class); + + /** + * Instantiate a ConnectionProvider using System properties. + * @return ConnectionProvider + * @throws HibernateException + */ + public static ConnectionProvider newConnectionProvider() throws HibernateException { + return newConnectionProvider( Environment.getProperties() ); + } + + /** + * Instantiate a ConnectionProvider using given properties. + * Method newConnectionProvider. + * @param properties hibernate SessionFactory properties + * @return ConnectionProvider + * @throws HibernateException + */ + public static ConnectionProvider newConnectionProvider(Properties properties) throws HibernateException { + return newConnectionProvider( properties, null ); + } + + /** + * Instantiate a ConnectionProvider using given properties. + * Method newConnectionProvider. 
+ * @param properties hibernate SessionFactory properties + * @param connectionProviderInjectionData object to be injected in the connection provider + * @return ConnectionProvider + * @throws HibernateException + */ + public static ConnectionProvider newConnectionProvider(Properties properties, Map connectionProviderInjectionData) throws HibernateException { + ConnectionProvider connections; + String providerClass = properties.getProperty(Environment.CONNECTION_PROVIDER); + if ( providerClass!=null ) { + try { + log.info("Initializing connection provider: " + providerClass); + connections = (ConnectionProvider) ReflectHelper.classForName(providerClass).newInstance(); + } + catch (Exception e) { + log.fatal("Could not instantiate connection provider", e); + throw new HibernateException("Could not instantiate connection provider: " + providerClass); + } + } + else if ( properties.getProperty(Environment.DATASOURCE)!=null ) { + connections = new DatasourceConnectionProvider(); + } + else if ( properties.getProperty(Environment.C3P0_MAX_SIZE)!=null ) { + connections = new C3P0ConnectionProvider(); + } + else if ( + properties.getProperty(Environment.PROXOOL_XML)!=null || + properties.getProperty(Environment.PROXOOL_PROPERTIES)!=null || + properties.getProperty(Environment.PROXOOL_EXISTING_POOL)!=null + ) { + connections = new ProxoolConnectionProvider(); + } + else if ( properties.getProperty(Environment.URL)!=null ) { + connections = new DriverManagerConnectionProvider(); + } + else { + connections = new UserSuppliedConnectionProvider(); + } + + if ( connectionProviderInjectionData != null && connectionProviderInjectionData.size() != 0 ) { + //inject the data + try { + BeanInfo info = Introspector.getBeanInfo( connections.getClass() ); + PropertyDescriptor[] descriptors = info.getPropertyDescriptors(); + int size = descriptors.length; + for (int index = 0 ; index < size ; index++) { + String propertyName = descriptors[index].getName(); + if ( connectionProviderInjectionData.containsKey( propertyName ) ) { + Method method = descriptors[index].getWriteMethod(); + method.invoke( connections, new Object[] { connectionProviderInjectionData.get( propertyName ) } ); + } + } + } + catch (IntrospectionException e) { + throw new HibernateException("Unable to inject objects into the connection provider", e); + } + catch (IllegalAccessException e) { + throw new HibernateException("Unable to inject objects into the connection provider", e); + } + catch (InvocationTargetException e) { + throw new HibernateException("Unable to inject objects into the connection provider", e); + } + } + connections.configure(properties); + return connections; + } + + // cannot be instantiated + private ConnectionProviderFactory() { throw new UnsupportedOperationException(); } + + /** + * Transform JDBC connection properties. + * + * Passed in the form hibernate.connection.*, they are converted to the + * format accepted by DriverManager by trimming the leading "hibernate.connection" prefix.
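+ * For example (key names illustrative):
+ * <pre>
+ * Properties in = new Properties();
+ * in.setProperty( "hibernate.connection.charSet", "utf8" );
+ * in.setProperty( "hibernate.connection.username", "scott" );
+ * Properties out = ConnectionProviderFactory.getConnectionProperties( in );
+ * // out contains charSet=utf8 and user=scott; the username key is
+ * // special-cased and re-exposed as the standard JDBC "user" property
+ * </pre>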
+ */ + public static Properties getConnectionProperties(Properties properties) { + + Iterator iter = properties.keySet().iterator(); + Properties result = new Properties(); + while ( iter.hasNext() ) { + String prop = (String) iter.next(); + if ( prop.indexOf(Environment.CONNECTION_PREFIX) > -1 && !SPECIAL_PROPERTIES.contains(prop) ) { + result.setProperty( + prop.substring( Environment.CONNECTION_PREFIX.length()+1 ), + properties.getProperty(prop) + ); + } + } + String userName = properties.getProperty(Environment.USER); + if (userName!=null) result.setProperty( "user", userName ); + return result; + } + + private static final Set SPECIAL_PROPERTIES; + static { + SPECIAL_PROPERTIES = new HashSet(); + SPECIAL_PROPERTIES.add(Environment.DATASOURCE); + SPECIAL_PROPERTIES.add(Environment.URL); + SPECIAL_PROPERTIES.add(Environment.CONNECTION_PROVIDER); + SPECIAL_PROPERTIES.add(Environment.POOL_SIZE); + SPECIAL_PROPERTIES.add(Environment.ISOLATION); + SPECIAL_PROPERTIES.add(Environment.DRIVER); + SPECIAL_PROPERTIES.add(Environment.USER); + + } + +} + + + + + + diff --git a/src/org/hibernate/connection/DatasourceConnectionProvider.java b/src/org/hibernate/connection/DatasourceConnectionProvider.java new file mode 100644 index 0000000000..e791994651 --- /dev/null +++ b/src/org/hibernate/connection/DatasourceConnectionProvider.java @@ -0,0 +1,93 @@ +//$Id$ +package org.hibernate.connection; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; + +import javax.sql.DataSource; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.util.NamingHelper; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * A connection provider that uses a DataSource registered with JNDI. + * Hibernate will use this ConnectionProvider by default if the + * property hibernate.connection.datasource is set. 
+ * @see ConnectionProvider + * @author Gavin King + */ +public class DatasourceConnectionProvider implements ConnectionProvider { + private DataSource ds; + private String user; + private String pass; + + private static final Log log = LogFactory.getLog(DatasourceConnectionProvider.class); + + public DataSource getDataSource() { + return ds; + } + + public void setDataSource(DataSource ds) { + this.ds = ds; + } + + public void configure(Properties props) throws HibernateException { + + String jndiName = props.getProperty(Environment.DATASOURCE); + if (jndiName==null) { + String msg = "datasource JNDI name was not specified by property " + Environment.DATASOURCE; + log.fatal(msg); + throw new HibernateException(msg); + } + + user = props.getProperty(Environment.USER); + pass = props.getProperty(Environment.PASS); + + try { + ds = (DataSource) NamingHelper.getInitialContext(props).lookup(jndiName); + } + catch (Exception e) { + log.fatal( "Could not find datasource: " + jndiName, e ); + throw new HibernateException( "Could not find datasource", e ); + } + if (ds==null) { + throw new HibernateException( "Could not find datasource: " + jndiName ); + } + log.info( "Using datasource: " + jndiName ); + } + + public Connection getConnection() throws SQLException { + if (user != null || pass != null) { + return ds.getConnection(user, pass); + } + else { + return ds.getConnection(); + } + } + + public void closeConnection(Connection conn) throws SQLException { + conn.close(); + } + + public void close() {} + + /** + * @see ConnectionProvider#supportsAggressiveRelease() + */ + public boolean supportsAggressiveRelease() { + return true; + } + +} + + + + + + + diff --git a/src/org/hibernate/connection/DriverManagerConnectionProvider.java b/src/org/hibernate/connection/DriverManagerConnectionProvider.java new file mode 100644 index 0000000000..ce0971f479 --- /dev/null +++ b/src/org/hibernate/connection/DriverManagerConnectionProvider.java @@ -0,0 +1,176 @@ +//$Id$ +package org.hibernate.connection; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.util.PropertiesHelper; +import org.hibernate.util.ReflectHelper; + +/** + * A connection provider that uses java.sql.DriverManager. This provider + * also implements a very rudimentary connection pool. 
+ * @see ConnectionProvider + * @author Gavin King + */ +public class DriverManagerConnectionProvider implements ConnectionProvider { + + private String url; + private Properties connectionProps; + private Integer isolation; + private final ArrayList pool = new ArrayList(); + private int poolSize; + private int checkedOut = 0; + private boolean autocommit; + + private static final Log log = LogFactory.getLog(DriverManagerConnectionProvider.class); + + public void configure(Properties props) throws HibernateException { + + String driverClass = props.getProperty(Environment.DRIVER); + + poolSize = PropertiesHelper.getInt(Environment.POOL_SIZE, props, 20); //default pool size 20 + log.info("Using Hibernate built-in connection pool (not for production use!)"); + log.info("Hibernate connection pool size: " + poolSize); + + autocommit = PropertiesHelper.getBoolean(Environment.AUTOCOMMIT, props); + log.info("autocommit mode: " + autocommit); + + isolation = PropertiesHelper.getInteger(Environment.ISOLATION, props); + if (isolation!=null) + log.info( "JDBC isolation level: " + Environment.isolationLevelToString( isolation.intValue() ) ); + + if (driverClass==null) { + log.warn("no JDBC Driver class was specified by property " + Environment.DRIVER); + } + else { + try { + // trying via forName() first to be as close to DriverManager's semantics + Class.forName(driverClass); + } + catch (ClassNotFoundException cnfe) { + try { + ReflectHelper.classForName(driverClass); + } + catch (ClassNotFoundException e) { + String msg = "JDBC Driver class not found: " + driverClass; + log.fatal(msg, e); + throw new HibernateException(msg, e); + } + } + } + + url = props.getProperty(Environment.URL); + if (url==null) { + String msg = "JDBC URL was not specified by property " + Environment.URL; + log.fatal(msg); + throw new HibernateException(msg); + } + + connectionProps = ConnectionProviderFactory.getConnectionProperties(props); + + log.info( "using driver: " + driverClass + " at URL: " + url ); + // if debug level is enabled, then log the password, otherwise mask it + if ( log.isDebugEnabled() ) { + log.info( "connection properties: " + connectionProps ); + } + else if ( log.isInfoEnabled() ) { + log.info( "connection properties: " + PropertiesHelper.maskOut(connectionProps, "password") ); + } + + } + + public Connection getConnection() throws SQLException { + + if ( log.isTraceEnabled() ) log.trace( "total checked-out connections: " + checkedOut ); + + synchronized (pool) { + if ( !pool.isEmpty() ) { + int last = pool.size() - 1; + if ( log.isTraceEnabled() ) { + log.trace("using pooled JDBC connection, pool size: " + last); + checkedOut++; + } + Connection pooled = (Connection) pool.remove(last); + if (isolation!=null) pooled.setTransactionIsolation( isolation.intValue() ); + if ( pooled.getAutoCommit()!=autocommit ) pooled.setAutoCommit(autocommit); + return pooled; + } + } + + log.debug("opening new JDBC connection"); + Connection conn = DriverManager.getConnection(url, connectionProps); + if (isolation!=null) conn.setTransactionIsolation( isolation.intValue() ); + if ( conn.getAutoCommit()!=autocommit ) conn.setAutoCommit(autocommit); + + if ( log.isDebugEnabled() ) { + log.debug( "created connection to: " + url + ", Isolation Level: " + conn.getTransactionIsolation() ); + } + if ( log.isTraceEnabled() ) checkedOut++; + + return conn; + } + + public void closeConnection(Connection conn) throws SQLException { + + if ( log.isDebugEnabled() ) checkedOut--; + + synchronized (pool) { + int currentSize = 
pool.size(); + if ( currentSize < poolSize ) { + if ( log.isTraceEnabled() ) log.trace("returning connection to pool, pool size: " + (currentSize + 1) ); + pool.add(conn); + return; + } + } + + log.debug("closing JDBC connection"); + + conn.close(); + + } + + protected void finalize() { + close(); + } + + public void close() { + + log.info("cleaning up connection pool: " + url); + + Iterator iter = pool.iterator(); + while ( iter.hasNext() ) { + try { + ( (Connection) iter.next() ).close(); + } + catch (SQLException sqle) { + log.warn("problem closing pooled connection", sqle); + } + } + pool.clear(); + + } + + /** + * @see ConnectionProvider#supportsAggressiveRelease() + */ + public boolean supportsAggressiveRelease() { + return false; + } + +} + + + + + + + diff --git a/src/org/hibernate/connection/ProxoolConnectionProvider.java b/src/org/hibernate/connection/ProxoolConnectionProvider.java new file mode 100644 index 0000000000..28b86b6b4a --- /dev/null +++ b/src/org/hibernate/connection/ProxoolConnectionProvider.java @@ -0,0 +1,199 @@ +//$Id$ +package org.hibernate.connection; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Properties; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.util.PropertiesHelper; +import org.hibernate.util.StringHelper; +import org.hibernate.util.ConfigHelper; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.logicalcobwebs.proxool.ProxoolException; +import org.logicalcobwebs.proxool.ProxoolFacade; +import org.logicalcobwebs.proxool.configuration.JAXPConfigurator; +import org.logicalcobwebs.proxool.configuration.PropertyConfigurator; + +/** + * A connection provider that uses a Proxool connection pool. Hibernate will use this by + * default if the hibernate.proxool.* properties are set. + * @see ConnectionProvider + */ +public class ProxoolConnectionProvider implements ConnectionProvider { + + + private static final String PROXOOL_JDBC_STEM = "proxool."; + + private static final Log log = LogFactory.getLog(ProxoolConnectionProvider.class); + + private String proxoolAlias; + + // TRUE if the pool is borrowed from the outside, FALSE if we used to create it + private boolean existingPool; + + // Not null if the Isolation level has been specified in the configuration file. + // Otherwise, it is left to the Driver's default value. + private Integer isolation; + + private boolean autocommit; + + /** + * Grab a connection + * @return a JDBC connection + * @throws SQLException + */ + public Connection getConnection() throws SQLException { + // get a connection from the pool (thru DriverManager, cfr. Proxool doc) + Connection c = DriverManager.getConnection(proxoolAlias); + + // set the Transaction Isolation if defined + if (isolation!=null) c.setTransactionIsolation( isolation.intValue() ); + + // toggle autoCommit to false if set + if ( c.getAutoCommit()!=autocommit ) c.setAutoCommit(autocommit); + + // return the connection + return c; + } + + /** + * Dispose of a used connection. + * @param conn a JDBC connection + * @throws SQLException + */ + public void closeConnection(Connection conn) throws SQLException { + conn.close(); + } + + /** + * Initialize the connection provider from given properties. 
+ * @param props SessionFactory properties + */ + public void configure(Properties props) throws HibernateException { + + // Get the configurator files (if available) + String jaxpFile = props.getProperty(Environment.PROXOOL_XML); + String propFile = props.getProperty(Environment.PROXOOL_PROPERTIES); + String externalConfig = props.getProperty(Environment.PROXOOL_EXISTING_POOL); + + // Default the Proxool alias setting + proxoolAlias = props.getProperty(Environment.PROXOOL_POOL_ALIAS); + + // Configured outside of Hibernate (i.e. a Servlet container, or Java Bean Container) + // already has Proxool pools running, and this provider is to just borrow one of these + if ( "true".equals(externalConfig) ) { + + // Validate that an alias name was provided to determine which pool to use + if ( !StringHelper.isNotEmpty(proxoolAlias) ) { + String msg = "Cannot configure Proxool Provider to use an existing in memory pool without the " + Environment.PROXOOL_POOL_ALIAS + " property set."; + log.fatal(msg); + throw new HibernateException(msg); + } + // Append the stem to the proxool pool alias + proxoolAlias = PROXOOL_JDBC_STEM + proxoolAlias; + + // Set the existing pool flag to true + existingPool = true; + + log.info("Configuring Proxool Provider using existing pool in memory: " + proxoolAlias); + + // Configured using the JAXP Configurator + } + else if ( StringHelper.isNotEmpty(jaxpFile) ) { + + log.info("Configuring Proxool Provider using JAXPConfigurator: " + jaxpFile); + + // Validate that an alias name was provided to determine which pool to use + if ( !StringHelper.isNotEmpty(proxoolAlias) ) { + String msg = "Cannot configure Proxool Provider to use JAXP without the " + Environment.PROXOOL_POOL_ALIAS + " property set."; + log.fatal(msg); + throw new HibernateException(msg); + } + + try { + JAXPConfigurator.configure( ConfigHelper.getConfigStreamReader(jaxpFile), false ); + } + catch (ProxoolException e) { + String msg = "Proxool Provider unable to load JAXP configurator file: " + jaxpFile; + log.fatal(msg, e); + throw new HibernateException(msg, e); + } + + // Append the stem to the proxool pool alias + proxoolAlias = PROXOOL_JDBC_STEM + proxoolAlias; + log.info("Configuring Proxool Provider to use pool alias: " + proxoolAlias); + + // Configured using the Properties File Configurator + } + else if ( StringHelper.isNotEmpty(propFile) ) { + + log.info("Configuring Proxool Provider using Properties File: " + propFile); + + // Validate that an alias name was provided to determine which pool to use + if ( !StringHelper.isNotEmpty(proxoolAlias) ) { + String msg = "Cannot configure Proxool Provider to use Properties File without the " + Environment.PROXOOL_POOL_ALIAS + " property set."; + log.fatal(msg); + throw new HibernateException(msg); + } + + try { + PropertyConfigurator.configure( ConfigHelper.getConfigProperties(propFile) ); + } + catch (ProxoolException e) { + String msg = "Proxool Provider unable to load Property configurator file: " + propFile; + log.fatal(msg, e); + throw new HibernateException(msg, e); + } + + // Append the stem to the proxool pool alias + proxoolAlias = PROXOOL_JDBC_STEM + proxoolAlias; + log.info("Configuring Proxool Provider to use pool alias: " + proxoolAlias); + } + + // Remember Isolation level + isolation = PropertiesHelper.getInteger(Environment.ISOLATION, props); + if (isolation!=null) { + log.info("JDBC isolation level: " + Environment.isolationLevelToString( isolation.intValue() ) ); + } + + autocommit = PropertiesHelper.getBoolean(Environment.AUTOCOMMIT,
props); + log.info("autocommit mode: " + autocommit); + } + + /** + * Release all resources held by this provider. JavaDoc requires a second sentence. + * @throws HibernateException + */ + public void close() throws HibernateException { + + // If the provider was leeching off an existing pool don't close it + if (existingPool) { + return; + } + + // We have created the pool ourselves, so shut it down + try { + ProxoolFacade.shutdown(0); + } + catch (Exception e) { + // If you're closing down the ConnectionProvider chances are an + // exception is not a real big deal, just warn + log.warn("Exception occurred when closing the Proxool pool", e); + throw new HibernateException("Exception occurred when closing the Proxool pool", e); + } + } + + /** + * @see ConnectionProvider#supportsAggressiveRelease() + */ + public boolean supportsAggressiveRelease() { + return false; + } + +} diff --git a/src/org/hibernate/connection/UserSuppliedConnectionProvider.java b/src/org/hibernate/connection/UserSuppliedConnectionProvider.java new file mode 100644 index 0000000000..587bcd692d --- /dev/null +++ b/src/org/hibernate/connection/UserSuppliedConnectionProvider.java @@ -0,0 +1,56 @@ +//$Id$ +package org.hibernate.connection; + +import java.sql.Connection; +import java.util.Properties; + +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; + +/** + * An implementation of the ConnectionProvider interface that + * simply throws an exception when a connection is requested. This implementation + * indicates that the user is expected to supply a JDBC connection. + * @see ConnectionProvider + * @author Gavin King + */ +public class UserSuppliedConnectionProvider implements ConnectionProvider { + + /** + * @see org.hibernate.connection.ConnectionProvider#configure(Properties) + */ + public void configure(Properties props) throws HibernateException { + LogFactory.getLog(UserSuppliedConnectionProvider.class).warn("No connection properties specified - the user must supply JDBC connections"); + } + + /** + * @see org.hibernate.connection.ConnectionProvider#getConnection() + */ + public Connection getConnection() { + throw new UnsupportedOperationException("The user must supply a JDBC connection"); + } + + /** + * @see org.hibernate.connection.ConnectionProvider#closeConnection(Connection) + */ + public void closeConnection(Connection conn) { + throw new UnsupportedOperationException("The user must supply a JDBC connection"); + } + + public void close() { + } + + /** + * @see ConnectionProvider#supportsAggressiveRelease() + */ + public boolean supportsAggressiveRelease() { + return false; + } + +} + + + + + + diff --git a/src/org/hibernate/connection/package.html b/src/org/hibernate/connection/package.html new file mode 100755 index 0000000000..61333d1487 --- /dev/null +++ b/src/org/hibernate/connection/package.html @@ -0,0 +1,13 @@ + + + +

    + This package abstracts the mechanism for obtaining + a JDBC connection. +

    +

    + A concrete implementation of ConnectionProvider may be + selected by specifying hibernate.connection.provider_class. +
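For illustration, a minimal custom provider might look like the following sketch (class and package names hypothetical). Hibernate instantiates it via its public default constructor and then calls configure():

    package com.example;  // hypothetical

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.util.Properties;

    import org.hibernate.HibernateException;
    import org.hibernate.cfg.Environment;
    import org.hibernate.connection.ConnectionProvider;
    import org.hibernate.connection.ConnectionProviderFactory;

    public class MyConnectionProvider implements ConnectionProvider {

        private String url;
        private Properties connectionProps;

        // public default constructor required by ConnectionProviderFactory
        public MyConnectionProvider() {}

        public void configure(Properties props) throws HibernateException {
            url = props.getProperty( Environment.URL );
            connectionProps = ConnectionProviderFactory.getConnectionProperties( props );
        }

        public Connection getConnection() throws SQLException {
            return DriverManager.getConnection( url, connectionProps );
        }

        public void closeConnection(Connection conn) throws SQLException {
            conn.close();
        }

        public void close() {}

        public boolean supportsAggressiveRelease() {
            return false;
        }
    }

Such a class would be selected by setting hibernate.connection.provider_class=com.example.MyConnectionProvider (name hypothetical).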

    + + diff --git a/src/org/hibernate/context/CurrentSessionContext.java b/src/org/hibernate/context/CurrentSessionContext.java new file mode 100644 index 0000000000..3e6f6a5e39 --- /dev/null +++ b/src/org/hibernate/context/CurrentSessionContext.java @@ -0,0 +1,37 @@ +package org.hibernate.context; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for implementations which know how to + * scope the notion of a {@link org.hibernate.SessionFactory#getCurrentSession() current session}. + *

+ * Implementations should adhere to the following:
+ * <ul>
+ * <li>contain a constructor accepting a single argument of type
+ * {@link org.hibernate.engine.SessionFactoryImplementor}</li>
+ * <li>should be thread safe</li>
+ * <li>should be fully serializable</li>
+ * </ul>
+ * (A sketch of a conforming implementation follows below.)
+ *

    + * Implementors should be aware that they are also fully responsible for + * cleanup of any generated current-sessions. + *
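+ * A sketch of a conforming implementation (names hypothetical; cleanup of
+ * generated sessions omitted for brevity):
+ * <pre>
+ * public class SimpleThreadSessionContext implements CurrentSessionContext {
+ *
+ *     private static final ThreadLocal CURRENT = new ThreadLocal();
+ *     private final SessionFactoryImplementor factory;
+ *
+ *     // the required single-argument constructor
+ *     public SimpleThreadSessionContext(SessionFactoryImplementor factory) {
+ *         this.factory = factory;
+ *     }
+ *
+ *     public org.hibernate.classic.Session currentSession() {
+ *         org.hibernate.classic.Session current =
+ *                 (org.hibernate.classic.Session) CURRENT.get();
+ *         if ( current == null ) {
+ *             current = factory.openSession();
+ *             CURRENT.set( current );
+ *         }
+ *         return current;
+ *     }
+ * }
+ * </pre>
+ *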

    + * Note that there will be exactly one instance of the configured + * CurrentSessionContext implementation per {@link org.hibernate.SessionFactory}. + * + * @author Steve Ebersole + */ +public interface CurrentSessionContext extends Serializable { + /** + * Retrieve the current session according to the scoping defined + * by this implementation. + * + * @return The current session. + * @throws org.hibernate.HibernateException Typically indicates an issue + * locating or creating the current session. + */ + public org.hibernate.classic.Session currentSession() throws HibernateException; +} diff --git a/src/org/hibernate/context/JTASessionContext.java b/src/org/hibernate/context/JTASessionContext.java new file mode 100644 index 0000000000..daf8a5d2b4 --- /dev/null +++ b/src/org/hibernate/context/JTASessionContext.java @@ -0,0 +1,168 @@ +package org.hibernate.context; + +import org.hibernate.HibernateException; +import org.hibernate.ConnectionReleaseMode; +import org.hibernate.classic.Session; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.util.JTAHelper; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import javax.transaction.Transaction; +import javax.transaction.TransactionManager; +import javax.transaction.Synchronization; +import java.util.Map; +import java.util.Hashtable; + +/** + * An implementation of {@link CurrentSessionContext} which scopes the notion + * of a current session to a JTA transaction. Because JTA gives us a nice + * tie-in to clean up after ourselves, this implementation will generate + * Sessions as needed provided a JTA transaction is in effect. If a session + * is not already associated with the current JTA transaction at the time + * {@link #currentSession()} is called, a new session will be opened and it + * will be associated with that JTA transaction. + *
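+ * For example (illustrative), two lookups within the same active JTA
+ * transaction yield the same session:
+ * <pre>
+ * Session s1 = sessionFactory.getCurrentSession();
+ * Session s2 = sessionFactory.getCurrentSession();
+ * // s1 == s2 : one session per JTA transaction
+ * </pre>
+ *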

    + * Note that the sessions returned from this method are automatically configured with + * both the {@link org.hibernate.cfg.Environment#FLUSH_BEFORE_COMPLETION auto-flush} and + * {@link org.hibernate.cfg.Environment#AUTO_CLOSE_SESSION auto-close} attributes set to + * true, meaning that the Session will be automatically flushed and closed + * as part of the lifecycle for the JTA transaction to which it is associated. + * Additionally, it will also be configured to aggressively release JDBC + * connections after each statement is executed. These settings are governed + * by the {@link #isAutoFlushEnabled()}, {@link #isAutoCloseEnabled()}, and + * {@link #getConnectionReleaseMode()} methods; these are provided (along with + * the {@link #buildOrObtainSession()} method) for easier subclassing for custom + * JTA-based session tracking logic (like maybe long-session semantics). + * + * @author Steve Ebersole + */ +public class JTASessionContext implements CurrentSessionContext { + + private static final Log log = LogFactory.getLog( JTASessionContext.class ); + + protected final SessionFactoryImplementor factory; + private transient Map currentSessionMap = new Hashtable(); + + public JTASessionContext(SessionFactoryImplementor factory) { + this.factory = factory; + } + + public Session currentSession() throws HibernateException { + TransactionManager transactionManager = factory.getTransactionManager(); + if ( transactionManager == null ) { + throw new HibernateException( "No TransactionManagerLookup specified" ); + } + + Transaction txn = null; + try { + txn = transactionManager.getTransaction(); + if ( txn == null ) { + throw new HibernateException( "Unable to locate current JTA transaction" ); + } + if ( !JTAHelper.isInProgress( txn.getStatus() ) ) { + // We could register the session against the transaction even though it is + // not started, but we'd have no guarentee of ever getting the map + // entries cleaned up (aside from spawning threads). + throw new HibernateException( "Current transaction is not in progress" ); + } + } + catch ( HibernateException e ) { + throw e; + } + catch ( Throwable t ) { + throw new HibernateException( "Problem locating/validating JTA transaction", t ); + } + + Session currentSession = ( Session ) currentSessionMap.get( txn ); + + if ( currentSession == null ) { + currentSession = buildOrObtainSession(); + + try { + txn.registerSynchronization( buildCleanupSynch( txn ) ); + } + catch ( Throwable t ) { + try { + currentSession.close(); + } + catch ( Throwable ignore ) { + log.debug( "Unable to release generated current-session on failed synch registration", ignore ); + } + throw new HibernateException( "Unable to register cleanup Synchronization with TransactionManager" ); + } + + currentSessionMap.put( txn, currentSession ); + } + + return currentSession; + } + + private CleanupSynch buildCleanupSynch(Transaction txn) { + return new CleanupSynch( txn, this ); + } + + /** + * Strictly provided for subclassing purposes; specifically to allow long-session + * support. + *

+	 * This implementation always just opens a new session.
+	 *
+	 * @return the built or (re)obtained session.
+	 */
+	protected Session buildOrObtainSession() {
+		return factory.openSession(
+				null,
+				isAutoFlushEnabled(),
+				isAutoCloseEnabled(),
+				getConnectionReleaseMode()
+		);
+	}
+
+	/**
+	 * Mainly for subclass usage. This impl always returns true.
+	 *
+	 * @return Whether or not the session should be closed by transaction completion.
+	 */
+	protected boolean isAutoCloseEnabled() {
+		return true;
+	}
+
+	/**
+	 * Mainly for subclass usage. This impl always returns true.
+	 *
+	 * @return Whether or not the session should be flushed prior to transaction completion.
+	 */
+	protected boolean isAutoFlushEnabled() {
+		return true;
+	}
+
+	/**
+	 * Mainly for subclass usage. This impl always returns after_statement.
+	 *
+	 * @return The connection release mode for any built sessions.
+	 */
+	protected ConnectionReleaseMode getConnectionReleaseMode() {
+		return ConnectionReleaseMode.AFTER_STATEMENT;
+	}
+
+	/**
+	 * JTA transaction synch used for cleanup of the internal session map.
+	 */
+	protected static class CleanupSynch implements Synchronization {
+		private Transaction txn;
+		private JTASessionContext context;
+
+		public CleanupSynch(Transaction txn, JTASessionContext context) {
+			this.txn = txn;
+			this.context = context;
+		}
+
+		public void beforeCompletion() {
+		}
+
+		public void afterCompletion(int i) {
+			context.currentSessionMap.remove( txn );
+		}
+	}
+} diff --git a/src/org/hibernate/context/ManagedSessionContext.java b/src/org/hibernate/context/ManagedSessionContext.java new file mode 100644 index 0000000000..b1bd29e2ed --- /dev/null +++ b/src/org/hibernate/context/ManagedSessionContext.java @@ -0,0 +1,125 @@ +package org.hibernate.context;
+
+import org.hibernate.classic.Session;
+import org.hibernate.HibernateException;
+import org.hibernate.SessionFactory;
+import org.hibernate.engine.SessionFactoryImplementor;
+
+import java.util.Map;
+import java.util.HashMap;
+
+/**
+ * Represents a {@link CurrentSessionContext} where the notion of a contextual session
+ * is managed by some external entity (generally some form of interceptor, etc).
+ * This external manager is responsible for scoping these contextual sessions
+ * appropriately, binding/unbinding them here for exposure to the application
+ * through {@link SessionFactory#getCurrentSession} calls.
+ *
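As a usage sketch for the JTA-bound context just defined: the JNDI name, the Event query, and pointing hibernate.current_session_context_class at this class are assumptions of the example, not part of the patch.

// fragment; assumes a JTA environment and a SessionFactory named 'factory'
UserTransaction utx = ( UserTransaction ) new InitialContext().lookup( "java:comp/UserTransaction" );
utx.begin();
try {
    Session s = factory.getCurrentSession(); // opened here and bound to the JTA txn
    s.createQuery( "from Event" ).list();
    // a second getCurrentSession() inside the same txn returns the same instance
    utx.commit(); // auto-flush, auto-close and map cleanup ride on txn completion
}
catch ( Exception e ) {
    utx.rollback();
    throw e;
}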

    + * Basically exposes two interfaces.

      + *
• First is the implementation of CurrentSessionContext, which is then used
+ * by {@link SessionFactory#getCurrentSession()} calls. This
+ * portion is instance-based and specific to the session factory owning the given
+ * instance of this impl (there will be one instance of this per session
+ * factory using this strategy).
+ *
• Second are the externally facing methods {@link #hasBind}, {@link #bind},
+ * and {@link #unbind} used by the external manager to control exposure of the
+ * current session it is scoping; see the usage sketch below. This portion is
+ * static to allow easy reference from that external manager.
+ *
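A sketch of that external choreography. The filter-like structure and the doUnitOfWork helper are hypothetical; only bind, unbind and getCurrentSession come from this patch.

// external manager, e.g. an interceptor or servlet filter, around a unit of work:
org.hibernate.classic.Session session = sessionFactory.openSession();
ManagedSessionContext.bind( session );
try {
    session.beginTransaction();
    doUnitOfWork( sessionFactory.getCurrentSession() ); // hypothetical application callback
    session.getTransaction().commit();
}
finally {
    ManagedSessionContext.unbind( sessionFactory );
    session.close();
}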
+ * The underlying storage of the current sessions here is a static
+ * {@link ThreadLocal}-based map where the sessions are keyed by the
+ * owning session factory.
+ *
+ * @author Steve Ebersole
+ */
+public class ManagedSessionContext implements CurrentSessionContext {
+
+	private static final ThreadLocal context = new ThreadLocal();
+	private final SessionFactoryImplementor factory;
+
+	public ManagedSessionContext(SessionFactoryImplementor factory) {
+		this.factory = factory;
+	}
+
+	/**
+	 * @see CurrentSessionContext#currentSession
+	 */
+	public Session currentSession() {
+		Session current = existingSession( factory );
+		if ( current == null ) {
+			throw new HibernateException( "No session currently bound to execution context" );
+		}
+		return current;
+	}
+
+	/**
+	 * Check to see if there is already a session associated with the current
+	 * thread for the given session factory.
+	 *
+	 * @param factory The factory against which to check for a given session
+	 * within the current thread.
+	 * @return True if there is currently a session bound.
+	 */
+	public static boolean hasBind(SessionFactory factory) {
+		return existingSession( factory ) != null;
+	}
+
+	/**
+	 * Binds the given session to the current context for its session factory.
+	 *
+	 * @param session The session to be bound.
+	 * @return Any previously bound session (should be null in most cases).
+	 */
+	public static Session bind(Session session) {
+		return ( Session ) sessionMap( true ).put( session.getSessionFactory(), session );
+	}
+
+	/**
+	 * Unbinds the session (if any) currently associated with the context for the
+	 * given session factory.
+	 *
+	 * @param factory The factory for which to unbind the current session.
+	 * @return The bound session if one, else null.
+	 */
+	public static Session unbind(SessionFactory factory) {
+		Session existing = null;
+		Map sessionMap = sessionMap();
+		if ( sessionMap != null ) {
+			existing = ( Session ) sessionMap.remove( factory );
+			doCleanup();
+		}
+		return existing;
+	}
+
+	private static Session existingSession(SessionFactory factory) {
+		Map sessionMap = sessionMap();
+		if ( sessionMap == null ) {
+			return null;
+		}
+		else {
+			return ( Session ) sessionMap.get( factory );
+		}
+	}
+
+	protected static Map sessionMap() {
+		return sessionMap( false );
+	}
+
+	private static synchronized Map sessionMap(boolean createMap) {
+		Map sessionMap = ( Map ) context.get();
+		if ( sessionMap == null && createMap ) {
+			sessionMap = new HashMap();
+			context.set( sessionMap );
+		}
+		return sessionMap;
+	}
+
+	private static synchronized void doCleanup() {
+		Map sessionMap = sessionMap( false );
+		if ( sessionMap != null ) {
+			if ( sessionMap.isEmpty() ) {
+				context.set( null );
+			}
+		}
+	}
+} diff --git a/src/org/hibernate/context/ThreadLocalSessionContext.java b/src/org/hibernate/context/ThreadLocalSessionContext.java new file mode 100644 index 0000000000..4330213452 --- /dev/null +++ b/src/org/hibernate/context/ThreadLocalSessionContext.java @@ -0,0 +1,339 @@ +package org.hibernate.context;
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.util.HashMap;
+import java.util.Map;
+import javax.transaction.Synchronization;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import
org.hibernate.ConnectionReleaseMode;
+import org.hibernate.HibernateException;
+import org.hibernate.SessionFactory;
+import org.hibernate.classic.Session;
+import org.hibernate.engine.SessionFactoryImplementor;
+
+/**
+ * A {@link CurrentSessionContext} impl which scopes the notion of current
+ * session by the current thread of execution. Unlike the JTA counterpart,
+ * threads do not give us a nice hook to perform any type of cleanup, making
+ * it questionable for this impl to actually generate Session instances. In
+ * the interest of usability, it was decided to have this default impl
+ * actually generate a session upon first request and then clean it up
+ * after the {@link org.hibernate.Transaction} associated with that session
+ * is committed/rolled-back. To ensure that happens, the sessions
+ * generated here are unusable until after {@link Session#beginTransaction()}
+ * has been called. If close() is called on a session managed by
+ * this class, it will be automatically unbound.
+ *

+ * Additionally, the static {@link #bind} and {@link #unbind} methods are
+ * provided to allow application code to explicitly control opening and
+ * closing of these sessions. This, combined with some form of interception,
+ * is the preferred approach. It also allows easy framework integration
+ * and provides one possible approach for implementing long-sessions.
+ *
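In code, the default thread-bound flow reads as follows. The entity and the "thread" shorthand for hibernate.current_session_context_class are assumptions of this sketch.

// hibernate.current_session_context_class=thread (shorthand assumed)
Session s = sessionFactory.getCurrentSession(); // lazily opened, bound to this thread
s.beginTransaction();        // required before the proxy permits most other calls
s.saveOrUpdate( anEvent );   // hypothetical entity instance
s.getTransaction().commit(); // the cleanup synch unbinds; auto-close closes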

+ * The {@link #buildOrObtainSession}, {@link #isAutoCloseEnabled},
+ * {@link #isAutoFlushEnabled}, {@link #getConnectionReleaseMode}, and
+ * {@link #buildCleanupSynch} methods are all provided to allow easy
+ * subclassing (for long-running session scenarios, for example).
+ *
+ * @author Steve Ebersole
+ */
+public class ThreadLocalSessionContext implements CurrentSessionContext {
+
+	private static final Log log = LogFactory.getLog( ThreadLocalSessionContext.class );
+	private static final Class[] SESS_PROXY_INTERFACES = new Class[] {
+			org.hibernate.classic.Session.class,
+			org.hibernate.engine.SessionImplementor.class,
+			org.hibernate.jdbc.JDBCContext.Context.class,
+			org.hibernate.event.EventSource.class
+	};
+
+	/**
+	 * A ThreadLocal maintaining current sessions for the given execution thread.
+	 * The actual ThreadLocal variable is a java.util.Map to account for
+	 * the possibility for multiple SessionFactorys being used during execution
+	 * of the given thread.
+	 */
+	private static final ThreadLocal context = new ThreadLocal();
+
+	protected final SessionFactoryImplementor factory;
+
+	public ThreadLocalSessionContext(SessionFactoryImplementor factory) {
+		this.factory = factory;
+	}
+
+	public final Session currentSession() throws HibernateException {
+		Session current = existingSession( factory );
+		if (current == null) {
+			current = buildOrObtainSession();
+			// register a cleanup synch
+			current.getTransaction().registerSynchronization( buildCleanupSynch() );
+			// wrap the session in the transaction-protection proxy
+			if ( needsWrapping( current ) ) {
+				current = wrap( current );
+			}
+			// then bind it
+			doBind( current, factory );
+		}
+		return current;
+	}
+
+	private boolean needsWrapping(Session session) {
+		// try to make sure we don't wrap an already wrapped session
+		return session != null
+				&& ( ! Proxy.isProxyClass( session.getClass() )
+				|| ! ( Proxy.getInvocationHandler( session ) instanceof TransactionProtectionWrapper ) );
+	}
+
+	protected SessionFactoryImplementor getFactory() {
+		return factory;
+	}
+
+	/**
+	 * Strictly provided for subclassing purposes; specifically to allow long-session
+	 * support.
+	 *

+	 * This implementation always just opens a new session.
+	 *
+	 * @return the built or (re)obtained session.
+	 */
+	protected Session buildOrObtainSession() {
+		return factory.openSession(
+				null,
+				isAutoFlushEnabled(),
+				isAutoCloseEnabled(),
+				getConnectionReleaseMode()
+		);
+	}
+
+	protected CleanupSynch buildCleanupSynch() {
+		return new CleanupSynch( factory );
+	}
+
+	/**
+	 * Mainly for subclass usage. This impl always returns true.
+	 *
+	 * @return Whether or not the session should be closed by transaction completion.
+	 */
+	protected boolean isAutoCloseEnabled() {
+		return true;
+	}
+
+	/**
+	 * Mainly for subclass usage. This impl always returns true.
+	 *
+	 * @return Whether or not the session should be flushed prior to transaction completion.
+	 */
+	protected boolean isAutoFlushEnabled() {
+		return true;
+	}
+
+	/**
+	 * Mainly for subclass usage. This impl returns the connection release mode
+	 * specified by the factory settings.
+	 *
+	 * @return The connection release mode for any built sessions.
+	 */
+	protected ConnectionReleaseMode getConnectionReleaseMode() {
+		return factory.getSettings().getConnectionReleaseMode();
+	}
+
+	protected Session wrap(Session session) {
+		TransactionProtectionWrapper wrapper = new TransactionProtectionWrapper( session );
+		Session wrapped = ( Session ) Proxy.newProxyInstance(
+				Session.class.getClassLoader(),
+				SESS_PROXY_INTERFACES,
+				wrapper
+		);
+		// yick! need this for proper serialization/deserialization handling...
+		wrapper.setWrapped( wrapped );
+		return wrapped;
+	}
+
+	/**
+	 * Associates the given session with the current thread of execution.
+	 *
+	 * @param session The session to bind.
+	 */
+	public static void bind(org.hibernate.Session session) {
+		SessionFactory factory = session.getSessionFactory();
+		cleanupAnyOrphanedSession( factory );
+		doBind( session, factory );
+	}
+
+	private static void cleanupAnyOrphanedSession(SessionFactory factory) {
+		Session orphan = doUnbind( factory, false );
+		if ( orphan != null ) {
+			log.warn( "Session already bound on call to bind(); make sure you clean up your sessions!" );
+			try {
+				if ( orphan.getTransaction() != null && orphan.getTransaction().isActive() ) {
+					try {
+						orphan.getTransaction().rollback();
+					}
+					catch( Throwable t ) {
+						log.debug( "Unable to rollback transaction for orphaned session", t );
+					}
+				}
+				orphan.close();
+			}
+			catch( Throwable t ) {
+				log.debug( "Unable to close orphaned session", t );
+			}
+		}
+	}
+
+	/**
+	 * Disassociate a previously bound session from the current thread of execution.
+	 *
+	 * @return The session which was unbound.
+	 */
+	public static Session unbind(SessionFactory factory) {
+		return doUnbind( factory, true );
+	}
+
+	private static Session existingSession(SessionFactory factory) {
+		Map sessionMap = sessionMap();
+		if ( sessionMap == null ) {
+			return null;
+		}
+		else {
+			return ( Session ) sessionMap.get( factory );
+		}
+	}
+
+	protected static Map sessionMap() {
+		return ( Map ) context.get();
+	}
+
+	private static void doBind(org.hibernate.Session session, SessionFactory factory) {
+		Map sessionMap = sessionMap();
+		if ( sessionMap == null ) {
+			sessionMap = new HashMap();
+			context.set( sessionMap );
+		}
+		sessionMap.put( factory, session );
+	}
+
+	private static Session doUnbind(SessionFactory factory, boolean releaseMapIfEmpty) {
+		Map sessionMap = sessionMap();
+		Session session = null;
+		if ( sessionMap != null ) {
+			session = ( Session ) sessionMap.remove( factory );
+			if ( releaseMapIfEmpty && sessionMap.isEmpty() ) {
+				context.set( null );
+			}
+		}
+		return session;
+	}
+
+	/**
+	 * JTA transaction synch used for cleanup of the internal session map.
+	 */
+	protected static class CleanupSynch implements Synchronization, Serializable {
+		protected final SessionFactory factory;
+
+		public CleanupSynch(SessionFactory factory) {
+			this.factory = factory;
+		}
+
+		public void beforeCompletion() {
+		}
+
+		public void afterCompletion(int i) {
+			unbind( factory );
+		}
+	}
+
+	private class TransactionProtectionWrapper implements InvocationHandler, Serializable {
+		private final Session realSession;
+		private Session wrappedSession;
+
+		public TransactionProtectionWrapper(Session realSession) {
+			this.realSession = realSession;
+		}
+
+		public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+			try {
+				// If close() is called, guarantee unbind()
+				if ( "close".equals( method.getName()) ) {
+					unbind( realSession.getSessionFactory() );
+				}
+				else if ( "toString".equals( method.getName() )
+						|| "equals".equals( method.getName() )
+						|| "hashCode".equals( method.getName() )
+						|| "getStatistics".equals( method.getName() )
+						|| "isOpen".equals( method.getName() ) ) {
+					// allow these to go through to the real session no matter what
+				}
+				else if ( !realSession.isOpen() ) {
+					// essentially, if the real session is closed allow any
+					// method call to pass through since the real session
+					// will complain by throwing an appropriate exception;
+					// NOTE that allowing close() above has the same basic effect,
+					// but we capture that there simply to perform the unbind...
+				}
+				else if ( !realSession.getTransaction().isActive() ) {
+					// limit the methods available if no transaction is active
+					if ( "beginTransaction".equals( method.getName() )
+							|| "getTransaction".equals( method.getName() )
+							|| "isTransactionInProgress".equals( method.getName() )
+							|| "setFlushMode".equals( method.getName() )
+							|| "getSessionFactory".equals( method.getName() ) ) {
+						log.trace( "allowing method [" + method.getName() + "] in non-transacted context" );
+					}
+					else if ( "reconnect".equals( method.getName() )
+							|| "disconnect".equals( method.getName() ) ) {
+						// allow these (deprecated) methods to pass through
+					}
+					else {
+						throw new HibernateException( method.getName() + " is not valid without an active transaction" );
+					}
+				}
+				log.trace( "allowing proxied method [" + method.getName() + "] to proceed to real session" );
+				return method.invoke( realSession, args );
+			}
+			catch ( InvocationTargetException e ) {
+				if ( e.getTargetException() instanceof RuntimeException ) {
+					throw ( RuntimeException ) e.getTargetException();
+				}
+				else {
+					throw e;
+				}
+			}
+		}
+
+		public void setWrapped(Session wrapped) {
+			this.wrappedSession = wrapped;
+		}
+
+
+		// serialization ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+		private void writeObject(ObjectOutputStream oos) throws IOException {
+			// if a ThreadLocalSessionContext-bound session happens to get
+			// serialized, to be completely correct, we need to make sure
+			// that unbinding of that session occurs.
+			oos.defaultWriteObject();
+			if ( existingSession( factory ) == wrappedSession ) {
+				unbind( factory );
+			}
+		}
+
+		private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
+			// conversely, if a ThreadLocalSessionContext-bound session gets
+			// deserialized, it makes sense to go ahead and re-bind it to the
+			// ThreadLocalSessionContext session map.
+			ois.defaultReadObject();
+			realSession.getTransaction().registerSynchronization( buildCleanupSynch() );
+			doBind( wrappedSession, factory );
+		}
+	}
+} diff --git a/src/org/hibernate/criterion/AbstractEmptinessExpression.java b/src/org/hibernate/criterion/AbstractEmptinessExpression.java new file mode 100644 index 0000000000..7275be0bde --- /dev/null +++ b/src/org/hibernate/criterion/AbstractEmptinessExpression.java @@ -0,0 +1,86 @@ +// $Id$ +package org.hibernate.criterion;
+
+import org.hibernate.Criteria;
+import org.hibernate.HibernateException;
+import org.hibernate.MappingException;
+import org.hibernate.QueryException;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.engine.TypedValue;
+import org.hibernate.persister.collection.QueryableCollection;
+import org.hibernate.persister.entity.Loadable;
+import org.hibernate.persister.entity.PropertyMapping;
+import org.hibernate.sql.ConditionFragment;
+import org.hibernate.type.CollectionType;
+import org.hibernate.type.Type;
+
+/**
+ * Base expression implementation for (not) emptiness checking of collection properties.
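These emptiness criteria are normally reached through the Restrictions factory rather than instantiated directly; a brief sketch, where the Parent entity and its children collection are assumptions:

List childless = session.createCriteria( Parent.class )
    .add( Restrictions.isEmpty( "children" ) )
    .list();
List parents = session.createCriteria( Parent.class )
    .add( Restrictions.isNotEmpty( "children" ) )
    .list();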
+ * + * @author Steve Ebersole + */ +public abstract class AbstractEmptinessExpression implements Criterion { + + private static final TypedValue[] NO_VALUES = new TypedValue[0]; + + protected final String propertyName; + + protected AbstractEmptinessExpression(String propertyName) { + this.propertyName = propertyName; + } + + protected abstract boolean excludeEmpty(); + + public final String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) throws HibernateException { + String entityName = criteriaQuery.getEntityName( criteria, propertyName ); + String actualPropertyName = criteriaQuery.getPropertyName( propertyName ); + String sqlAlias = criteriaQuery.getSQLAlias( criteria, propertyName ); + + SessionFactoryImplementor factory = criteriaQuery.getFactory(); + QueryableCollection collectionPersister = getQueryableCollection( entityName, actualPropertyName, factory ); + + String[] collectionKeys = collectionPersister.getKeyColumnNames(); + String[] ownerKeys = ( ( Loadable ) factory.getEntityPersister( entityName ) ).getIdentifierColumnNames(); + + String innerSelect = "(select 1 from " + collectionPersister.getTableName() + + " where " + + new ConditionFragment().setTableAlias( sqlAlias ).setCondition( ownerKeys, collectionKeys ).toFragmentString() + + ")"; + + return excludeEmpty() + ? "exists " + innerSelect + : "not exists " + innerSelect; + } + + + protected QueryableCollection getQueryableCollection(String entityName, String propertyName, SessionFactoryImplementor factory) + throws HibernateException { + PropertyMapping ownerMapping = ( PropertyMapping ) factory.getEntityPersister( entityName ); + Type type = ownerMapping.toType( propertyName ); + if ( !type.isCollectionType() ) { + throw new MappingException( + "Property path [" + entityName + "." + propertyName + "] does not reference a collection" + ); + } + + String role = ( ( CollectionType ) type ).getRole(); + try { + return ( QueryableCollection ) factory.getCollectionPersister( role ); + } + catch ( ClassCastException cce ) { + throw new QueryException( "collection role is not queryable: " + role ); + } + catch ( Exception e ) { + throw new QueryException( "collection role not found: " + role ); + } + } + + public final TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return NO_VALUES; + } + + public final String toString() { + return propertyName + ( excludeEmpty() ? 
" is not empty" : " is empty" ); + } +} diff --git a/src/org/hibernate/criterion/AggregateProjection.java b/src/org/hibernate/criterion/AggregateProjection.java new file mode 100644 index 0000000000..0489f7a254 --- /dev/null +++ b/src/org/hibernate/criterion/AggregateProjection.java @@ -0,0 +1,43 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; + +/** + * An aggregation + * @author max + */ +public class AggregateProjection extends SimpleProjection { + + protected final String propertyName; + private final String aggregate; + + protected AggregateProjection(String aggregate, String propertyName) { + this.aggregate = aggregate; + this.propertyName = propertyName; + } + + public String toString() { + return aggregate + "(" + propertyName + ')'; + } + + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return new Type[] { criteriaQuery.getType(criteria, propertyName) }; + } + + public String toSqlString(Criteria criteria, int loc, CriteriaQuery criteriaQuery) + throws HibernateException { + return new StringBuffer() + .append(aggregate) + .append("(") + .append( criteriaQuery.getColumn(criteria, propertyName) ) + .append(") as y") + .append(loc) + .append('_') + .toString(); + } + +} diff --git a/src/org/hibernate/criterion/AliasedProjection.java b/src/org/hibernate/criterion/AliasedProjection.java new file mode 100755 index 0000000000..12c7ebd5d8 --- /dev/null +++ b/src/org/hibernate/criterion/AliasedProjection.java @@ -0,0 +1,65 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; + +/** + * @author Gavin King + */ +public class AliasedProjection implements Projection { + + private final Projection projection; + private final String alias; + + public String toString() { + return projection.toString() + " as " + alias; + } + + protected AliasedProjection(Projection projection, String alias) { + this.projection = projection; + this.alias = alias; + } + + public String toSqlString(Criteria criteria, int position, CriteriaQuery criteriaQuery) + throws HibernateException { + return projection.toSqlString(criteria, position, criteriaQuery); + } + + public String toGroupSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return projection.toGroupSqlString(criteria, criteriaQuery); + } + + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return projection.getTypes(criteria, criteriaQuery); + } + + public String[] getColumnAliases(int loc) { + return projection.getColumnAliases(loc); + } + + public Type[] getTypes(String alias, Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return this.alias.equals(alias) ? + getTypes(criteria, criteriaQuery) : + null; + } + + public String[] getColumnAliases(String alias, int loc) { + return this.alias.equals(alias) ? 
+				getColumnAliases(loc) :
+				null;
+	}
+
+	public String[] getAliases() {
+		return new String[]{ alias };
+	}
+
+	public boolean isGrouped() {
+		return projection.isGrouped();
+	}
+
+} diff --git a/src/org/hibernate/criterion/AvgProjection.java b/src/org/hibernate/criterion/AvgProjection.java new file mode 100755 index 0000000000..504352ade3 --- /dev/null +++ b/src/org/hibernate/criterion/AvgProjection.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate.criterion;
+
+import org.hibernate.Criteria;
+import org.hibernate.Hibernate;
+import org.hibernate.HibernateException;
+import org.hibernate.type.Type;
+
+/**
+ * @author Gavin King
+ */
+public class AvgProjection extends AggregateProjection {
+
+	public AvgProjection(String propertyName) {
+		super("avg", propertyName);
+	}
+
+	public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		return new Type[] { Hibernate.DOUBLE };
+	}
+} diff --git a/src/org/hibernate/criterion/BetweenExpression.java b/src/org/hibernate/criterion/BetweenExpression.java new file mode 100644 index 0000000000..6873f0618c --- /dev/null +++ b/src/org/hibernate/criterion/BetweenExpression.java @@ -0,0 +1,48 @@ +//$Id$ +package org.hibernate.criterion;
+
+
+import org.hibernate.Criteria;
+import org.hibernate.HibernateException;
+import org.hibernate.engine.TypedValue;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Constrains a property to lie between two values
+ * @author Gavin King
+ */
+public class BetweenExpression implements Criterion {
+
+	private final String propertyName;
+	private final Object lo;
+	private final Object hi;
+
+	protected BetweenExpression(String propertyName, Object lo, Object hi) {
+		this.propertyName = propertyName;
+		this.lo = lo;
+		this.hi = hi;
+	}
+
+	public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		return StringHelper.join(
+			" and ",
+			StringHelper.suffix( criteriaQuery.getColumnsUsingProjection(criteria, propertyName), " between ? and ?" )
+		);
+
+		//TODO: get SQL rendering out of this package!
+ } + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return new TypedValue[] { + criteriaQuery.getTypedValue(criteria, propertyName, lo), + criteriaQuery.getTypedValue(criteria, propertyName, hi) + }; + } + + public String toString() { + return propertyName + " between " + lo + " and " + hi; + } + +} diff --git a/src/org/hibernate/criterion/Conjunction.java b/src/org/hibernate/criterion/Conjunction.java new file mode 100644 index 0000000000..cd1769835f --- /dev/null +++ b/src/org/hibernate/criterion/Conjunction.java @@ -0,0 +1,13 @@ +package org.hibernate.criterion; + + +/** + * @author Gavin King + */ +public class Conjunction extends Junction { + + public Conjunction() { + super("and"); + } + +} diff --git a/src/org/hibernate/criterion/CountProjection.java b/src/org/hibernate/criterion/CountProjection.java new file mode 100755 index 0000000000..36a8ee21d6 --- /dev/null +++ b/src/org/hibernate/criterion/CountProjection.java @@ -0,0 +1,51 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; + +/** + * A count + * @author Gavin King + */ +public class CountProjection extends AggregateProjection { + + private boolean distinct; + + protected CountProjection(String prop) { + super("count", prop); + } + + public String toString() { + if(distinct) { + return "distinct " + super.toString(); + } else { + return super.toString(); + } + } + + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return new Type[] { Hibernate.INTEGER }; + } + + public String toSqlString(Criteria criteria, int position, CriteriaQuery criteriaQuery) + throws HibernateException { + StringBuffer buf = new StringBuffer(); + buf.append("count("); + if (distinct) buf.append("distinct "); + return buf.append( criteriaQuery.getColumn(criteria, propertyName) ) + .append(") as y") + .append(position) + .append('_') + .toString(); + } + + public CountProjection setDistinct() { + distinct = true; + return this; + } + +} diff --git a/src/org/hibernate/criterion/CriteriaQuery.java b/src/org/hibernate/criterion/CriteriaQuery.java new file mode 100755 index 0000000000..d264f881dd --- /dev/null +++ b/src/org/hibernate/criterion/CriteriaQuery.java @@ -0,0 +1,92 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.TypedValue; +import org.hibernate.type.Type; + +/** + * An instance of CriteriaQuery is passed to criterion, + * order and projection instances when actually compiling and + * executing the query. This interface is not used by application + * code. 
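While application code never calls it, CriteriaQuery is the contract handed to custom Criterion and Projection implementations. A minimal sketch of a custom criterion built on it follows; the class name and the rendered SQL are illustrative only, not part of the patch.

import org.hibernate.Criteria;
import org.hibernate.criterion.CriteriaQuery;
import org.hibernate.criterion.Criterion;
import org.hibernate.engine.TypedValue;

// Illustrative only: case-insensitive equality via lower()
public class LowerEqExpression implements Criterion {

    private final String propertyName;
    private final String value;

    public LowerEqExpression(String propertyName, String value) {
        this.propertyName = propertyName;
        this.value = value;
    }

    public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) {
        return "lower(" + criteriaQuery.getColumn( criteria, propertyName ) + ") = ?";
    }

    public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) {
        return new TypedValue[] {
            criteriaQuery.getTypedValue( criteria, propertyName, value.toLowerCase() )
        };
    }
}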
+ *
+ * @author Gavin King
+ */
+public interface CriteriaQuery {
+	public SessionFactoryImplementor getFactory();
+
+	/**
+	 * Get the name of the column mapped by a property path,
+	 * ignoring projection aliases
+	 */
+	public String getColumn(Criteria criteria, String propertyPath)
+	throws HibernateException;
+
+	/**
+	 * Get the type of a property path, ignoring projection aliases
+	 */
+	public Type getType(Criteria criteria, String propertyPath)
+	throws HibernateException;
+
+	/**
+	 * Get the names of the columns mapped by a property path
+	 */
+	public String[] getColumnsUsingProjection(Criteria criteria, String propertyPath)
+	throws HibernateException;
+
+	/**
+	 * Get the type of a property path
+	 */
+	public Type getTypeUsingProjection(Criteria criteria, String propertyPath)
+	throws HibernateException;
+
+	/**
+	 * Get a typed value for the given property value.
+	 */
+	public TypedValue getTypedValue(Criteria criteria, String propertyPath, Object value)
+	throws HibernateException;
+
+	/**
+	 * Get the entity name of an entity
+	 */
+	public String getEntityName(Criteria criteria);
+
+	/**
+	 * Get the entity name of an entity, taking into account
+	 * the qualifier of the property path
+	 */
+	public String getEntityName(Criteria criteria, String propertyPath);
+
+	/**
+	 * Get the root table alias of an entity
+	 */
+	public String getSQLAlias(Criteria subcriteria);
+
+	/**
+	 * Get the root table alias of an entity, taking into account
+	 * the qualifier of the property path
+	 */
+	public String getSQLAlias(Criteria criteria, String propertyPath);
+
+	/**
+	 * Get the property name, given a possibly qualified property name
+	 */
+	public String getPropertyName(String propertyName);
+
+	/**
+	 * Get the identifier column names of this entity
+	 */
+	public String[] getIdentifierColumns(Criteria subcriteria);
+
+	/**
+	 * Get the identifier type of this entity
+	 */
+	public Type getIdentifierType(Criteria subcriteria);
+
+	public TypedValue getTypedIdentifierValue(Criteria subcriteria, Object value);
+
+	public String generateSQLAlias();
+} \ No newline at end of file diff --git a/src/org/hibernate/criterion/CriteriaSpecification.java b/src/org/hibernate/criterion/CriteriaSpecification.java new file mode 100755 index 0000000000..167649fb2d --- /dev/null +++ b/src/org/hibernate/criterion/CriteriaSpecification.java @@ -0,0 +1,55 @@ +//$Id$ +package org.hibernate.criterion;
+
+import org.hibernate.transform.AliasToEntityMapResultTransformer;
+import org.hibernate.transform.DistinctRootEntityResultTransformer;
+import org.hibernate.transform.PassThroughResultTransformer;
+import org.hibernate.transform.ResultTransformer;
+import org.hibernate.transform.RootEntityResultTransformer;
+
+/**
+ * @author Gavin King
+ */
+public interface CriteriaSpecification {
+
+	/**
+	 * The alias that refers to the "root" entity of the criteria query.
+	 */
+	public static final String ROOT_ALIAS = "this";
+
+	/**
+	 * Each row of results is a Map from alias to entity instance
+	 */
+	public static final ResultTransformer ALIAS_TO_ENTITY_MAP = new AliasToEntityMapResultTransformer();
+
+	/**
+	 * Each row of results is an instance of the root entity
+	 */
+	public static final ResultTransformer ROOT_ENTITY = new RootEntityResultTransformer();
+
+	/**
+	 * Each row of results is a distinct instance of the root entity
+	 */
+	public static final ResultTransformer DISTINCT_ROOT_ENTITY = new DistinctRootEntityResultTransformer();
+
+	/**
+	 * This result transformer is selected implicitly by calling setProjection()
+	 */
+	public static final ResultTransformer PROJECTION = new PassThroughResultTransformer();
+
+	/**
+	 * Specifies joining to an entity based on an inner join.
+	 */
+	public static final int INNER_JOIN = org.hibernate.sql.JoinFragment.INNER_JOIN;
+
+	/**
+	 * Specifies joining to an entity based on a full join.
+	 */
+	public static final int FULL_JOIN = org.hibernate.sql.JoinFragment.FULL_JOIN;
+
+	/**
+	 * Specifies joining to an entity based on a left outer join.
+	 */
+	public static final int LEFT_JOIN = org.hibernate.sql.JoinFragment.LEFT_OUTER_JOIN;
+
+} diff --git a/src/org/hibernate/criterion/Criterion.java b/src/org/hibernate/criterion/Criterion.java new file mode 100644 index 0000000000..64dfe96de0 --- /dev/null +++ b/src/org/hibernate/criterion/Criterion.java @@ -0,0 +1,44 @@ +//$Id$ +package org.hibernate.criterion;
+
+
+import java.io.Serializable;
+
+import org.hibernate.Criteria;
+import org.hibernate.HibernateException;
+import org.hibernate.engine.TypedValue;
+
+/**
+ * An object-oriented representation of a query criterion that may be used
+ * as a restriction in a Criteria query.
+ * Built-in criterion types are provided by the Restrictions factory
+ * class. This interface might be implemented by application classes that
+ * define custom restriction criteria.
+ *
+ * @see Restrictions
+ * @see org.hibernate.Criteria
+ * @author Gavin King
+ */
+public interface Criterion extends Serializable {
+
+	/**
+	 * Render the SQL fragment
+	 * @param criteria
+	 * @param criteriaQuery
+	 * @return String
+	 * @throws HibernateException
+	 */
+	public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException;
+
+	/**
+	 * Return typed values for all parameters in the rendered SQL fragment
+	 * @param criteria TODO
+	 * @param criteriaQuery
+	 * @return TypedValue[]
+	 * @throws HibernateException
	 */
+	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException;
+
+} diff --git a/src/org/hibernate/criterion/DetachedCriteria.java b/src/org/hibernate/criterion/DetachedCriteria.java new file mode 100755 index 0000000000..53d731bf86 --- /dev/null +++ b/src/org/hibernate/criterion/DetachedCriteria.java @@ -0,0 +1,125 @@ +//$Id$ +package org.hibernate.criterion;
+
+import java.io.Serializable;
+
+import org.hibernate.Criteria;
+import org.hibernate.FetchMode;
+import org.hibernate.HibernateException;
+import org.hibernate.Session;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.impl.CriteriaImpl;
+import org.hibernate.impl.SessionImpl;
+import org.hibernate.transform.ResultTransformer;
+
+/**
+ * Some applications need to create criteria queries in "detached
+ * mode", where the Hibernate session is not available.
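A typical detached round trip, sketched below. The Cat entity, its properties, and the Order.asc factory (which sits on the Order class added later in this patch, outside this excerpt) are assumptions of the example.

// built in a layer with no session available:
DetachedCriteria query = DetachedCriteria.forClass( Cat.class )
    .add( Restrictions.eq( "sex", new Character( 'F' ) ) )
    .addOrder( Order.asc( "name" ) ); // Order.asc assumed

// later, once a session is at hand:
List cats = query.getExecutableCriteria( session ).list();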
This class
+ * may be instantiated anywhere, and then a Criteria
+ * may be obtained by passing a session to
+ * getExecutableCriteria(). All methods have the
+ * same semantics and behavior as the corresponding methods of the
+ * Criteria interface.
+ *
+ * @see org.hibernate.Criteria
+ * @author Gavin King
+ */
+public class DetachedCriteria implements CriteriaSpecification, Serializable {
+
+	private final CriteriaImpl impl;
+	private final Criteria criteria;
+
+	protected DetachedCriteria(String entityName) {
+		impl = new CriteriaImpl(entityName, null);
+		criteria = impl;
+	}
+
+	protected DetachedCriteria(String entityName, String alias) {
+		impl = new CriteriaImpl(entityName, alias, null);
+		criteria = impl;
+	}
+
+	protected DetachedCriteria(CriteriaImpl impl, Criteria criteria) {
+		this.impl = impl;
+		this.criteria = criteria;
+	}
+
+	/**
+	 * Get an executable instance of Criteria,
+	 * to actually run the query.
+	 */
+	public Criteria getExecutableCriteria(Session session) {
+		impl.setSession( ( SessionImplementor ) session );
+		return impl;
+	}
+
+	public static DetachedCriteria forEntityName(String entityName) {
+		return new DetachedCriteria(entityName);
+	}
+
+	public static DetachedCriteria forEntityName(String entityName, String alias) {
+		return new DetachedCriteria(entityName, alias);
+	}
+
+	public static DetachedCriteria forClass(Class clazz) {
+		return new DetachedCriteria( clazz.getName() );
+	}
+
+	public static DetachedCriteria forClass(Class clazz, String alias) {
+		return new DetachedCriteria( clazz.getName() , alias );
+	}
+
+	public DetachedCriteria add(Criterion criterion) {
+		criteria.add(criterion);
+		return this;
+	}
+
+	public DetachedCriteria addOrder(Order order) {
+		criteria.addOrder(order);
+		return this;
+	}
+
+	public DetachedCriteria createAlias(String associationPath, String alias)
+	throws HibernateException {
+		criteria.createAlias(associationPath, alias);
+		return this;
+	}
+
+	public DetachedCriteria createCriteria(String associationPath, String alias)
+	throws HibernateException {
+		return new DetachedCriteria( impl, criteria.createCriteria(associationPath, alias) );
+	}
+
+	public DetachedCriteria createCriteria(String associationPath)
+	throws HibernateException {
+		return new DetachedCriteria( impl, criteria.createCriteria(associationPath) );
+	}
+
+	public String getAlias() {
+		return criteria.getAlias();
+	}
+
+	public DetachedCriteria setFetchMode(String associationPath, FetchMode mode)
+	throws HibernateException {
+		criteria.setFetchMode(associationPath, mode);
+		return this;
+	}
+
+	public DetachedCriteria setProjection(Projection projection) {
+		criteria.setProjection(projection);
+		return this;
+	}
+
+	public DetachedCriteria setResultTransformer(ResultTransformer resultTransformer) {
+		criteria.setResultTransformer(resultTransformer);
+		return this;
+	}
+
+	public String toString() {
+		return "DetachedCriteria(" + criteria.toString() + ')';
+	}
+
+	CriteriaImpl getCriteriaImpl() {
+		return impl;
+	}
+} diff --git a/src/org/hibernate/criterion/Disjunction.java b/src/org/hibernate/criterion/Disjunction.java new file mode 100644 index 0000000000..85fd2d0c54 --- /dev/null +++ b/src/org/hibernate/criterion/Disjunction.java @@ -0,0 +1,13 @@ +package org.hibernate.criterion;
+
+
+/**
+ * @author Gavin King
+ */
+public class Disjunction extends Junction {
+
+	protected Disjunction() {
+		super("or");
+	}
+
+} diff --git a/src/org/hibernate/criterion/Distinct.java b/src/org/hibernate/criterion/Distinct.java new file mode 100755 index
0000000000..d25bd3c0d7 --- /dev/null +++ b/src/org/hibernate/criterion/Distinct.java @@ -0,0 +1,58 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; + +/** + * @author Gavin King + */ +public class Distinct implements Projection { + + private final Projection projection; + + public Distinct(Projection proj) { + this.projection = proj; + } + + public String toSqlString(Criteria criteria, int position, CriteriaQuery criteriaQuery) + throws HibernateException { + return "distinct " + projection.toSqlString(criteria, position, criteriaQuery); + } + + public String toGroupSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return projection.toGroupSqlString(criteria, criteriaQuery); + } + + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return projection.getTypes(criteria, criteriaQuery); + } + + public Type[] getTypes(String alias, Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return projection.getTypes(alias, criteria, criteriaQuery); + } + + public String[] getColumnAliases(int loc) { + return projection.getColumnAliases(loc); + } + + public String[] getColumnAliases(String alias, int loc) { + return projection.getColumnAliases(alias, loc); + } + + public String[] getAliases() { + return projection.getAliases(); + } + + public boolean isGrouped() { + return projection.isGrouped(); + } + + public String toString() { + return "distinct " + projection.toString(); + } +} diff --git a/src/org/hibernate/criterion/EmptyExpression.java b/src/org/hibernate/criterion/EmptyExpression.java new file mode 100755 index 0000000000..44301cafb5 --- /dev/null +++ b/src/org/hibernate/criterion/EmptyExpression.java @@ -0,0 +1,17 @@ +//$Id$ +package org.hibernate.criterion; + +/** + * @author Gavin King + */ +public class EmptyExpression extends AbstractEmptinessExpression implements Criterion { + + protected EmptyExpression(String propertyName) { + super( propertyName ); + } + + protected boolean excludeEmpty() { + return false; + } + +} diff --git a/src/org/hibernate/criterion/Example.java b/src/org/hibernate/criterion/Example.java new file mode 100644 index 0000000000..5e057bc71e --- /dev/null +++ b/src/org/hibernate/criterion/Example.java @@ -0,0 +1,374 @@ +//$Id$ +package org.hibernate.criterion; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.hibernate.Criteria; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.engine.TypedValue; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.AbstractComponentType; +import org.hibernate.type.Type; +import org.hibernate.util.StringHelper; + +/** + * Support for query by example. + *

    + * List results = session.createCriteria(Parent.class)
    + *     .add( Example.create(parent).ignoreCase() )
    + *     .createCriteria("child")
    + *         .add( Example.create( parent.getChild() ) )
    + *     .list();
    + * 
    + * "Examples" may be mixed and matched with "Expressions" in the same Criteria. + * @see org.hibernate.Criteria + * @author Gavin King + */ + +public class Example implements Criterion { + + private final Object entity; + private final Set excludedProperties = new HashSet(); + private PropertySelector selector; + private boolean isLikeEnabled; + private Character escapeCharacter; + private boolean isIgnoreCaseEnabled; + private MatchMode matchMode; + + /** + * A strategy for choosing property values for inclusion in the query + * criteria + */ + + public static interface PropertySelector extends Serializable { + public boolean include(Object propertyValue, String propertyName, Type type); + } + + private static final PropertySelector NOT_NULL = new NotNullPropertySelector(); + private static final PropertySelector ALL = new AllPropertySelector(); + private static final PropertySelector NOT_NULL_OR_ZERO = new NotNullOrZeroPropertySelector(); + + static final class AllPropertySelector implements PropertySelector { + public boolean include(Object object, String propertyName, Type type) { + return true; + } + + private Object readResolve() { + return ALL; + } + } + + static final class NotNullPropertySelector implements PropertySelector { + public boolean include(Object object, String propertyName, Type type) { + return object!=null; + } + + private Object readResolve() { + return NOT_NULL; + } + } + + static final class NotNullOrZeroPropertySelector implements PropertySelector { + public boolean include(Object object, String propertyName, Type type) { + return object!=null && ( + !(object instanceof Number) || ( (Number) object ).longValue()!=0 + ); + } + + private Object readResolve() { + return NOT_NULL_OR_ZERO; + } + } + + /** + * Set escape character for "like" clause + */ + public Example setEscapeCharacter(Character escapeCharacter) { + this.escapeCharacter = escapeCharacter; + return this; + } + + /** + * Set the property selector + */ + public Example setPropertySelector(PropertySelector selector) { + this.selector = selector; + return this; + } + + /** + * Exclude zero-valued properties + */ + public Example excludeZeroes() { + setPropertySelector(NOT_NULL_OR_ZERO); + return this; + } + + /** + * Don't exclude null or zero-valued properties + */ + public Example excludeNone() { + setPropertySelector(ALL); + return this; + } + + /** + * Use the "like" operator for all string-valued properties + */ + public Example enableLike(MatchMode matchMode) { + isLikeEnabled = true; + this.matchMode = matchMode; + return this; + } + + /** + * Use the "like" operator for all string-valued properties + */ + public Example enableLike() { + return enableLike(MatchMode.EXACT); + } + + /** + * Ignore case for all string-valued properties + */ + public Example ignoreCase() { + isIgnoreCaseEnabled = true; + return this; + } + + /** + * Exclude a particular named property + */ + public Example excludeProperty(String name) { + excludedProperties.add(name); + return this; + } + + /** + * Create a new instance, which includes all non-null properties + * by default + * @param entity + * @return a new instance of Example + */ + public static Example create(Object entity) { + if (entity==null) throw new NullPointerException("null example"); + return new Example(entity, NOT_NULL); + } + + protected Example(Object entity, PropertySelector selector) { + this.entity = entity; + this.selector = selector; + } + + public String toString() { + return "example (" + entity + ')'; + } + + private boolean 
isPropertyIncluded(Object value, String name, Type type) { + return !excludedProperties.contains(name) && + !type.isAssociationType() && + selector.include(value, name, type); + } + + public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + + StringBuffer buf = new StringBuffer().append('('); + EntityPersister meta = criteriaQuery.getFactory().getEntityPersister( criteriaQuery.getEntityName(criteria) ); + String[] propertyNames = meta.getPropertyNames(); + Type[] propertyTypes = meta.getPropertyTypes(); + //TODO: get all properties, not just the fetched ones! + Object[] propertyValues = meta.getPropertyValues( entity, getEntityMode(criteria, criteriaQuery) ); + for (int i=0; iRestrictions. + * + * @see Restrictions + * @author Gavin King + */ +public final class Expression extends Restrictions { + + private Expression() { + //cannot be instantiated + } + + /** + * Apply a constraint expressed in SQL, with the given JDBC + * parameters. Any occurrences of {alias} will be + * replaced by the table alias. + * + * @param sql + * @param values + * @param types + * @return Criterion + */ + public static Criterion sql(String sql, Object[] values, Type[] types) { + return new SQLCriterion(sql, values, types); + } + /** + * Apply a constraint expressed in SQL, with the given JDBC + * parameter. Any occurrences of {alias} will be replaced + * by the table alias. + * + * @param sql + * @param value + * @param type + * @return Criterion + */ + public static Criterion sql(String sql, Object value, Type type) { + return new SQLCriterion(sql, new Object[] { value }, new Type[] { type } ); + } + /** + * Apply a constraint expressed in SQL. Any occurrences of {alias} + * will be replaced by the table alias. + * + * @param sql + * @return Criterion + */ + public static Criterion sql(String sql) { + return new SQLCriterion(sql, ArrayHelper.EMPTY_OBJECT_ARRAY, ArrayHelper.EMPTY_TYPE_ARRAY); + } + +} diff --git a/src/org/hibernate/criterion/IdentifierEqExpression.java b/src/org/hibernate/criterion/IdentifierEqExpression.java new file mode 100755 index 0000000000..1be5eeeca8 --- /dev/null +++ b/src/org/hibernate/criterion/IdentifierEqExpression.java @@ -0,0 +1,46 @@ +//$Id$ +package org.hibernate.criterion; + + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.engine.TypedValue; +import org.hibernate.util.StringHelper; + +/** + * An identifier constraint + * @author Gavin King + */ +public class IdentifierEqExpression implements Criterion { + + private final Object value; + + protected IdentifierEqExpression(Object value) { + this.value = value; + } + + public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + + String[] columns = criteriaQuery.getIdentifierColumns(criteria); + + String result = StringHelper.join( + " and ", + StringHelper.suffix( columns, " = ?" ) + ); + if (columns.length>1) result = '(' + result + ')'; + return result; + + //TODO: get SQL rendering out of this package! 
+ } + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return new TypedValue[] { criteriaQuery.getTypedIdentifierValue(criteria, value) }; + } + + public String toString() { + return "id = " + value; + } + +} diff --git a/src/org/hibernate/criterion/IdentifierProjection.java b/src/org/hibernate/criterion/IdentifierProjection.java new file mode 100755 index 0000000000..1a598ca175 --- /dev/null +++ b/src/org/hibernate/criterion/IdentifierProjection.java @@ -0,0 +1,61 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; +import org.hibernate.util.StringHelper; + +/** + * A property value, or grouped property value + * @author Gavin King + */ +public class IdentifierProjection extends SimpleProjection { + + private boolean grouped; + + protected IdentifierProjection(boolean grouped) { + this.grouped = grouped; + } + + protected IdentifierProjection() { + this(false); + } + + public String toString() { + return "id"; + } + + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return new Type[] { criteriaQuery.getIdentifierType(criteria) }; + } + + public String toSqlString(Criteria criteria, int position, CriteriaQuery criteriaQuery) + throws HibernateException { + StringBuffer buf = new StringBuffer(); + String[] cols = criteriaQuery.getIdentifierColumns(criteria); + for ( int i=0; i1 ) singleValueParam = '(' + singleValueParam + ')'; + String params = values.length>0 ? + StringHelper.repeat( singleValueParam + ", ", values.length-1 ) + singleValueParam : + ""; + String cols = StringHelper.join(", ", columns); + if ( columns.length>1 ) cols = '(' + cols + ')'; + return cols + " in (" + params + ')'; + } + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + ArrayList list = new ArrayList(); + Type type = criteriaQuery.getTypeUsingProjection(criteria, propertyName); + if ( type.isComponentType() ) { + AbstractComponentType actype = (AbstractComponentType) type; + Type[] types = actype.getSubtypes(); + for ( int i=0; i1) result = '(' + result + ')'; + return result; + + //TODO: get SQL rendering out of this package! 
+ } + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return NO_VALUES; + } + + public String toString() { + return propertyName + " is not null"; + } + +} diff --git a/src/org/hibernate/criterion/NullExpression.java b/src/org/hibernate/criterion/NullExpression.java new file mode 100644 index 0000000000..6e5cc046d0 --- /dev/null +++ b/src/org/hibernate/criterion/NullExpression.java @@ -0,0 +1,46 @@ +//$Id$ +package org.hibernate.criterion; + + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.engine.TypedValue; +import org.hibernate.util.StringHelper; + +/** + * Constrains a property to be null + * @author Gavin King + */ +public class NullExpression implements Criterion { + + private final String propertyName; + + private static final TypedValue[] NO_VALUES = new TypedValue[0]; + + protected NullExpression(String propertyName) { + this.propertyName = propertyName; + } + + public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + String[] columns = criteriaQuery.getColumnsUsingProjection(criteria, propertyName); + String result = StringHelper.join( + " and ", + StringHelper.suffix( columns, " is null" ) + ); + if (columns.length>1) result = '(' + result + ')'; + return result; + + //TODO: get SQL rendering out of this package! + } + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return NO_VALUES; + } + + public String toString() { + return propertyName + " is null"; + } + +} diff --git a/src/org/hibernate/criterion/Order.java b/src/org/hibernate/criterion/Order.java new file mode 100644 index 0000000000..581ccfcfcf --- /dev/null +++ b/src/org/hibernate/criterion/Order.java @@ -0,0 +1,83 @@ +//$Id$ +package org.hibernate.criterion; + +import java.io.Serializable; +import java.sql.Types; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Represents an order imposed upon a Criteria result set + * @author Gavin King + */ +public class Order implements Serializable { + + private boolean ascending; + private boolean ignoreCase; + private String propertyName; + + public String toString() { + return propertyName + ' ' + (ascending?"asc":"desc"); + } + + public Order ignoreCase() { + ignoreCase = true; + return this; + } + + /** + * Constructor for Order. + */ + protected Order(String propertyName, boolean ascending) { + this.propertyName = propertyName; + this.ascending = ascending; + } + + /** + * Render the SQL fragment + * + */ + public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + String[] columns = criteriaQuery.getColumnsUsingProjection(criteria, propertyName); + Type type = criteriaQuery.getTypeUsingProjection(criteria, propertyName); + StringBuffer fragment = new StringBuffer(); + for ( int i=0; iCriteria query. Built-in projection types are provided + * by the Projections factory class. + * This interface might be implemented by application classes that + * define custom projections. 
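Order, shown above, is consumed through Criteria.addOrder; a brief sketch (the asc/desc factories accompany the Order class but fall outside this excerpt, and the entity is assumed):

List cats = session.createCriteria( Cat.class )
    .addOrder( Order.asc( "name" ).ignoreCase() ) // asc assumed
    .addOrder( Order.desc( "age" ) )              // desc assumed
    .list();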
+ * + * @see Projections + * @see org.hibernate.Criteria + * @author Gavin King + */ +public interface Projection extends Serializable { + + /** + * Render the SQL fragment + * @param criteriaQuery + * @param columnAlias + * @return String + * @throws HibernateException + */ + public String toSqlString(Criteria criteria, int position, CriteriaQuery criteriaQuery) + throws HibernateException; + + /** + * Render the SQL fragment to be used in the group by clause + * @param criteriaQuery + * @param columnAlias + * @return String + * @throws HibernateException + */ + public String toGroupSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException; + + /** + * Return types returned by the rendered SQL fragment + * @param criteria + * @param criteriaQuery + * @return Type[] + * @throws HibernateException + */ + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException; + /** + * Return types for a particular user-visible alias + */ + public Type[] getTypes(String alias, Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException; + + /** + * Get the SQL select clause column aliases + */ + public String[] getColumnAliases(int loc); + /** + * Get the SQL select clause column aliases for a particular + * user-visible alias + */ + public String[] getColumnAliases(String alias, int loc); + + /** + * Get the user-visible aliases for this projection + * (ie. the ones that will be passed to the + * ResultTransformer) + */ + public String[] getAliases(); + + /** + * Does this projection specify grouping attributes? + */ + public boolean isGrouped(); + +} diff --git a/src/org/hibernate/criterion/ProjectionList.java b/src/org/hibernate/criterion/ProjectionList.java new file mode 100755 index 0000000000..9f4b86dd41 --- /dev/null +++ b/src/org/hibernate/criterion/ProjectionList.java @@ -0,0 +1,125 @@ +//$Id$ +package org.hibernate.criterion; + +import java.util.ArrayList; +import java.util.List; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; +import org.hibernate.util.ArrayHelper; + +/** + * @author Gavin King + */ +public class ProjectionList implements Projection { + + private List elements = new ArrayList(); + + protected ProjectionList() {} + + public ProjectionList create() { + return new ProjectionList(); + } + + public ProjectionList add(Projection proj) { + elements.add(proj); + return this; + } + + public ProjectionList add(Projection projection, String alias) { + return add( Projections.alias(projection, alias) ); + } + + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + List types = new ArrayList( getLength() ); + for ( int i=0; i2 ) buf.setLength( buf.length()-2 ); //pull off the last ", " + return buf.toString(); + } + + public String[] getColumnAliases(int loc) { + List result = new ArrayList( getLength() ); + for ( int i=0; icriterion package may be used by applications as a framework for building + * new kinds of Projection. However, it is intended that most applications will + * simply use the built-in projection types via the static factory methods of this class.
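The canonical combined use of these factories looks like the following sketch, with entity and property names assumed:

List results = session.createCriteria( Cat.class )
    .setProjection( Projections.projectionList()
        .add( Projections.rowCount() )
        .add( Projections.avg( "weight" ) )
        .add( Projections.max( "weight" ) )
        .add( Projections.groupProperty( "color" ) )
    )
    .list();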
    + *
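+ * For example (an illustrative sketch, assuming an open Session and a mapped
+ * Cat entity), a grouped, aliased projection can drive both the select list
+ * and the ordering:
+ *
+ *     List results = session.createCriteria(Cat.class)
+ *         .setProjection( Projections.projectionList()
+ *             .add( Projections.groupProperty("color"), "color" )
+ *             .add( Projections.rowCount(), "count" ) )
+ *         .addOrder( Order.asc("color") )
+ *         .list();
+ *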
    + * The factory methods that take an alias allow the projected value to be referred to by + * criterion and order instances. + * + * @see org.hibernate.Criteria + * @see Restrictions factory methods for Criterion instances + * @author Gavin King + */ +public final class Projections { + + private Projections() { + //cannot be instantiated + } + + /** + * Create a distinct projection from a projection + */ + public static Projection distinct(Projection proj) { + return new Distinct(proj); + } + + /** + * Create a new projection list + */ + public static ProjectionList projectionList() { + return new ProjectionList(); + } + + /** + * The query row count, ie. count(*) + */ + public static Projection rowCount() { + return new RowCountProjection(); + } + + /** + * A property value count + */ + public static CountProjection count(String propertyName) { + return new CountProjection(propertyName); + } + + /** + * A distinct property value count + */ + public static CountProjection countDistinct(String propertyName) { + return new CountProjection(propertyName).setDistinct(); + } + + /** + * A property maximum value + */ + public static AggregateProjection max(String propertyName) { + return new AggregateProjection("max", propertyName); + } + + /** + * A property minimum value + */ + public static AggregateProjection min(String propertyName) { + return new AggregateProjection("min", propertyName); + } + + /** + * A property average value + */ + public static AggregateProjection avg(String propertyName) { + return new AvgProjection(propertyName); + } + + /** + * A property value sum + */ + public static AggregateProjection sum(String propertyName) { + return new AggregateProjection("sum", propertyName); + } + + /** + * A SQL projection, a typed select clause fragment + */ + public static Projection sqlProjection(String sql, String[] columnAliases, Type[] types) { + return new SQLProjection(sql, columnAliases, types); + } + + /** + * A grouping SQL projection, specifying both select clause and group by clause fragments + */ + public static Projection sqlGroupProjection(String sql, String groupBy, String[] columnAliases, Type[] types) { + return new SQLProjection(sql, groupBy, columnAliases, types); + } + + /** + * A grouping property value + */ + public static PropertyProjection groupProperty(String propertyName) { + return new PropertyProjection(propertyName, true); + } + + /** + * A projected property value + */ + public static PropertyProjection property(String propertyName) { + return new PropertyProjection(propertyName); + } + + /** + * A projected identifier value + */ + public static IdentifierProjection id() { + return new IdentifierProjection(); + } + + /** + * Assign an alias to a projection, by wrapping it + */ + public static Projection alias(Projection projection, String alias) { + return new AliasedProjection(projection, alias); + } +} diff --git a/src/org/hibernate/criterion/Property.java b/src/org/hibernate/criterion/Property.java new file mode 100755 index 0000000000..cfd1991559 --- /dev/null +++ b/src/org/hibernate/criterion/Property.java @@ -0,0 +1,235 @@ +//$Id$ +package org.hibernate.criterion; + +import java.util.Collection; + +/** + * A factory for property-specific criterion and projection instances + * @author Gavin King + */ +public class Property extends PropertyProjection { + //private String propertyName; + protected Property(String propertyName) { + super(propertyName); + } + + public Criterion between(Object min, Object max) { + return 
Restrictions.between(getPropertyName(), min, max); + } + + public Criterion in(Collection values) { + return Restrictions.in(getPropertyName(), values); + } + + public Criterion in(Object[] values) { + return Restrictions.in(getPropertyName(), values); + } + + public SimpleExpression like(Object value) { + return Restrictions.like(getPropertyName(), value); + } + + public SimpleExpression like(String value, MatchMode matchMode) { + return Restrictions.like(getPropertyName(), value, matchMode); + } + + public SimpleExpression eq(Object value) { + return Restrictions.eq(getPropertyName(), value); + } + + public SimpleExpression ne(Object value) { + return Restrictions.ne(getPropertyName(), value); + } + + public SimpleExpression gt(Object value) { + return Restrictions.gt(getPropertyName(), value); + } + + public SimpleExpression lt(Object value) { + return Restrictions.lt(getPropertyName(), value); + } + + public SimpleExpression le(Object value) { + return Restrictions.le(getPropertyName(), value); + } + + public SimpleExpression ge(Object value) { + return Restrictions.ge(getPropertyName(), value); + } + + public PropertyExpression eqProperty(Property other) { + return Restrictions.eqProperty( getPropertyName(), other.getPropertyName() ); + } + + public PropertyExpression neProperty(Property other) { + return Restrictions.neProperty( getPropertyName(), other.getPropertyName() ); + } + + public PropertyExpression leProperty(Property other) { + return Restrictions.leProperty( getPropertyName(), other.getPropertyName() ); + } + + public PropertyExpression geProperty(Property other) { + return Restrictions.geProperty( getPropertyName(), other.getPropertyName() ); + } + + public PropertyExpression ltProperty(Property other) { + return Restrictions.ltProperty( getPropertyName(), other.getPropertyName() ); + } + + public PropertyExpression gtProperty(Property other) { + return Restrictions.gtProperty( getPropertyName(), other.getPropertyName() ); + } + + public PropertyExpression eqProperty(String other) { + return Restrictions.eqProperty( getPropertyName(), other ); + } + + public PropertyExpression neProperty(String other) { + return Restrictions.neProperty( getPropertyName(), other ); + } + + public PropertyExpression leProperty(String other) { + return Restrictions.leProperty( getPropertyName(), other ); + } + + public PropertyExpression geProperty(String other) { + return Restrictions.geProperty( getPropertyName(), other ); + } + + public PropertyExpression ltProperty(String other) { + return Restrictions.ltProperty( getPropertyName(), other ); + } + + public PropertyExpression gtProperty(String other) { + return Restrictions.gtProperty( getPropertyName(), other ); + } + + public Criterion isNull() { + return Restrictions.isNull(getPropertyName()); + } + + public Criterion isNotNull() { + return Restrictions.isNotNull(getPropertyName()); + } + + public Criterion isEmpty() { + return Restrictions.isEmpty(getPropertyName()); + } + + public Criterion isNotEmpty() { + return Restrictions.isNotEmpty(getPropertyName()); + } + + public CountProjection count() { + return Projections.count(getPropertyName()); + } + + public AggregateProjection max() { + return Projections.max(getPropertyName()); + } + + public AggregateProjection min() { + return Projections.min(getPropertyName()); + } + + public AggregateProjection avg() { + return Projections.avg(getPropertyName()); + } + + /*public PropertyProjection project() { + return Projections.property(getPropertyName()); + }*/ + + public PropertyProjection 
group() { + return Projections.groupProperty(getPropertyName()); + } + + public Order asc() { + return Order.asc(getPropertyName()); + } + + public Order desc() { + return Order.desc(getPropertyName()); + } + + public static Property forName(String propertyName) { + return new Property(propertyName); + } + + /** + * Get a component attribute of this property + */ + public Property getProperty(String propertyName) { + return forName( getPropertyName() + '.' + propertyName ); + } + + public Criterion eq(DetachedCriteria subselect) { + return Subqueries.propertyEq( getPropertyName(), subselect ); + } + + public Criterion ne(DetachedCriteria subselect) { + return Subqueries.propertyNe( getPropertyName(), subselect ); + } + + public Criterion lt(DetachedCriteria subselect) { + return Subqueries.propertyLt( getPropertyName(), subselect ); + } + + public Criterion le(DetachedCriteria subselect) { + return Subqueries.propertyLe( getPropertyName(), subselect ); + } + + public Criterion gt(DetachedCriteria subselect) { + return Subqueries.propertyGt( getPropertyName(), subselect ); + } + + public Criterion ge(DetachedCriteria subselect) { + return Subqueries.propertyGe( getPropertyName(), subselect ); + } + + public Criterion notIn(DetachedCriteria subselect) { + return Subqueries.propertyNotIn( getPropertyName(), subselect ); + } + + public Criterion in(DetachedCriteria subselect) { + return Subqueries.propertyIn( getPropertyName(), subselect ); + } + + public Criterion eqAll(DetachedCriteria subselect) { + return Subqueries.propertyEqAll( getPropertyName(), subselect ); + } + + public Criterion gtAll(DetachedCriteria subselect) { + return Subqueries.propertyGtAll( getPropertyName(), subselect ); + } + + public Criterion ltAll(DetachedCriteria subselect) { + return Subqueries.propertyLtAll( getPropertyName(), subselect ); + } + + public Criterion leAll(DetachedCriteria subselect) { + return Subqueries.propertyLeAll( getPropertyName(), subselect ); + } + + public Criterion geAll(DetachedCriteria subselect) { + return Subqueries.propertyGeAll( getPropertyName(), subselect ); + } + + public Criterion gtSome(DetachedCriteria subselect) { + return Subqueries.propertyGtSome( getPropertyName(), subselect ); + } + + public Criterion ltSome(DetachedCriteria subselect) { + return Subqueries.propertyLtSome( getPropertyName(), subselect ); + } + + public Criterion leSome(DetachedCriteria subselect) { + return Subqueries.propertyLeSome( getPropertyName(), subselect ); + } + + public Criterion geSome(DetachedCriteria subselect) { + return Subqueries.propertyGeSome( getPropertyName(), subselect ); + } + +} diff --git a/src/org/hibernate/criterion/PropertyExpression.java b/src/org/hibernate/criterion/PropertyExpression.java new file mode 100644 index 0000000000..c7b23f5da4 --- /dev/null +++ b/src/org/hibernate/criterion/PropertyExpression.java @@ -0,0 +1,54 @@ +//$Id$ +package org.hibernate.criterion; + + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.engine.TypedValue; +import org.hibernate.util.StringHelper; + +/** + * superclass for comparisons between two properties (with SQL binary operators) + * @author Gavin King + */ +public class PropertyExpression implements Criterion { + + private final String propertyName; + private final String otherPropertyName; + private final String op; + + private static final TypedValue[] NO_TYPED_VALUES = new TypedValue[0]; + + protected PropertyExpression(String propertyName, String otherPropertyName, String op) { + 
this.propertyName = propertyName; + this.otherPropertyName = otherPropertyName; + this.op = op; + } + + public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + String[] xcols = criteriaQuery.getColumnsUsingProjection(criteria, propertyName); + String[] ycols = criteriaQuery.getColumnsUsingProjection(criteria, otherPropertyName); + String result = StringHelper.join( + " and ", + StringHelper.add(xcols, getOp(), ycols) + ); + if (xcols.length>1) result = '(' + result + ')'; + return result; + //TODO: get SQL rendering out of this package! + } + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return NO_TYPED_VALUES; + } + + public String toString() { + return propertyName + getOp() + otherPropertyName; + } + + public String getOp() { + return op; + } + +} diff --git a/src/org/hibernate/criterion/PropertyProjection.java b/src/org/hibernate/criterion/PropertyProjection.java new file mode 100755 index 0000000000..d08f8d7fbf --- /dev/null +++ b/src/org/hibernate/criterion/PropertyProjection.java @@ -0,0 +1,63 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; + +/** + * A property value, or grouped property value + * @author Gavin King + */ +public class PropertyProjection extends SimpleProjection { + + private String propertyName; + private boolean grouped; + + protected PropertyProjection(String prop, boolean grouped) { + this.propertyName = prop; + this.grouped = grouped; + } + + protected PropertyProjection(String prop) { + this(prop, false); + } + + public String getPropertyName() { + return propertyName; + } + + public String toString() { + return propertyName; + } + + public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return new Type[] { criteriaQuery.getType(criteria, propertyName) }; + } + + public String toSqlString(Criteria criteria, int position, CriteriaQuery criteriaQuery) + throws HibernateException { + return new StringBuffer() + .append( criteriaQuery.getColumn(criteria, propertyName) ) + .append(" as y") + .append(position) + .append('_') + .toString(); + } + + public boolean isGrouped() { + return grouped; + } + + public String toGroupSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + if (!grouped) { + return super.toGroupSqlString(criteria, criteriaQuery); + } + else { + return criteriaQuery.getColumn(criteria, propertyName); + } + } + +} diff --git a/src/org/hibernate/criterion/PropertySubqueryExpression.java b/src/org/hibernate/criterion/PropertySubqueryExpression.java new file mode 100755 index 0000000000..ea0ac25b07 --- /dev/null +++ b/src/org/hibernate/criterion/PropertySubqueryExpression.java @@ -0,0 +1,23 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; + +/** + * A comparison between a property value in the outer query and the + * result of a subquery + * @author Gavin King + */ +public class PropertySubqueryExpression extends SubqueryExpression { + private String propertyName; + + protected PropertySubqueryExpression(String propertyName, String op, String quantifier, DetachedCriteria dc) { + super(op, quantifier, dc); + this.propertyName = propertyName; + } + + protected String toLeftSqlString(Criteria criteria, CriteriaQuery criteriaQuery) { + return criteriaQuery.getColumn(criteria, propertyName); + } + +} diff --git 
a/src/org/hibernate/criterion/Restrictions.java b/src/org/hibernate/criterion/Restrictions.java new file mode 100755 index 0000000000..40058d9885 --- /dev/null +++ b/src/org/hibernate/criterion/Restrictions.java @@ -0,0 +1,368 @@ +//$Id$ +package org.hibernate.criterion; + +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; + +import org.hibernate.type.Type; +import org.hibernate.util.ArrayHelper; + +/** + * The criterion package may be used by applications as a framework for building + * new kinds of Criterion. However, it is intended that most applications will + * simply use the built-in criterion types via the static factory methods of this class. + * + * @see org.hibernate.Criteria + * @see Projections factory methods for Projection instances + * @author Gavin King + */ +public class Restrictions { + + Restrictions() { + //cannot be instantiated + } + + /** + * Apply an "equal" constraint to the identifier property + * @param propertyName + * @param value + * @return Criterion + */ + public static Criterion idEq(Object value) { + return new IdentifierEqExpression(value); + } + /** + * Apply an "equal" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression eq(String propertyName, Object value) { + return new SimpleExpression(propertyName, value, "="); + } + /** + * Apply a "not equal" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression ne(String propertyName, Object value) { + return new SimpleExpression(propertyName, value, "<>"); + } + /** + * Apply a "like" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression like(String propertyName, Object value) { + return new SimpleExpression(propertyName, value, " like "); + } + /** + * Apply a "like" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression like(String propertyName, String value, MatchMode matchMode) { + return new SimpleExpression(propertyName, matchMode.toMatchString(value), " like " ); + } + /** + * A case-insensitive "like", similar to Postgres ilike + * operator + * + * @param propertyName + * @param value + * @return Criterion + */ + public static Criterion ilike(String propertyName, String value, MatchMode matchMode) { + return new IlikeExpression(propertyName, value, matchMode); + } + /** + * A case-insensitive "like", similar to Postgres ilike + * operator + * + * @param propertyName + * @param value + * @return Criterion + */ + public static Criterion ilike(String propertyName, Object value) { + return new IlikeExpression(propertyName, value); + } + /** + * Apply a "greater than" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression gt(String propertyName, Object value) { + return new SimpleExpression(propertyName, value, ">"); + } + /** + * Apply a "less than" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression lt(String propertyName, Object value) { + return new SimpleExpression(propertyName, value, "<"); + } + /** + * Apply a "less than or equal" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression le(String propertyName, Object value) 
{ + return new SimpleExpression(propertyName, value, "<="); + } + /** + * Apply a "greater than or equal" constraint to the named property + * @param propertyName + * @param value + * @return Criterion + */ + public static SimpleExpression ge(String propertyName, Object value) { + return new SimpleExpression(propertyName, value, ">="); + } + /** + * Apply a "between" constraint to the named property + * @param propertyName + * @param lo value + * @param hi value + * @return Criterion + */ + public static Criterion between(String propertyName, Object lo, Object hi) { + return new BetweenExpression(propertyName, lo, hi); + } + /** + * Apply an "in" constraint to the named property + * @param propertyName + * @param values + * @return Criterion + */ + public static Criterion in(String propertyName, Object[] values) { + return new InExpression(propertyName, values); + } + /** + * Apply an "in" constraint to the named property + * @param propertyName + * @param values + * @return Criterion + */ + public static Criterion in(String propertyName, Collection values) { + return new InExpression( propertyName, values.toArray() ); + } + /** + * Apply an "is null" constraint to the named property + * @return Criterion + */ + public static Criterion isNull(String propertyName) { + return new NullExpression(propertyName); + } + /** + * Apply an "equal" constraint to two properties + */ + public static PropertyExpression eqProperty(String propertyName, String otherPropertyName) { + return new PropertyExpression(propertyName, otherPropertyName, "="); + } + /** + * Apply a "not equal" constraint to two properties + */ + public static PropertyExpression neProperty(String propertyName, String otherPropertyName) { + return new PropertyExpression(propertyName, otherPropertyName, "<>"); + } + /** + * Apply a "less than" constraint to two properties + */ + public static PropertyExpression ltProperty(String propertyName, String otherPropertyName) { + return new PropertyExpression(propertyName, otherPropertyName, "<"); + } + /** + * Apply a "less than or equal" constraint to two properties + */ + public static PropertyExpression leProperty(String propertyName, String otherPropertyName) { + return new PropertyExpression(propertyName, otherPropertyName, "<="); + } + /** + * Apply a "greater than" constraint to two properties + */ + public static PropertyExpression gtProperty(String propertyName, String otherPropertyName) { + return new PropertyExpression(propertyName, otherPropertyName, ">"); + } + /** + * Apply a "greater than or equal" constraint to two properties + */ + public static PropertyExpression geProperty(String propertyName, String otherPropertyName) { + return new PropertyExpression(propertyName, otherPropertyName, ">="); + } + /** + * Apply an "is not null" constraint to the named property + * @return Criterion + */ + public static Criterion isNotNull(String propertyName) { + return new NotNullExpression(propertyName); + } + /** + * Return the conjuction of two expressions + * + * @param lhs + * @param rhs + * @return Criterion + */ + public static LogicalExpression and(Criterion lhs, Criterion rhs) { + return new LogicalExpression(lhs, rhs, "and"); + } + /** + * Return the disjuction of two expressions + * + * @param lhs + * @param rhs + * @return Criterion + */ + public static LogicalExpression or(Criterion lhs, Criterion rhs) { + return new LogicalExpression(lhs, rhs, "or"); + } + /** + * Return the negation of an expression + * + * @param expression + * @return Criterion + */ + public static 
Criterion not(Criterion expression) { + return new NotExpression(expression); + } + /** + * Apply a constraint expressed in SQL, with the given JDBC + * parameters. Any occurrences of {alias} will be + * replaced by the table alias. + * + * @param sql + * @param values + * @param types + * @return Criterion + */ + public static Criterion sqlRestriction(String sql, Object[] values, Type[] types) { + return new SQLCriterion(sql, values, types); + } + /** + * Apply a constraint expressed in SQL, with the given JDBC + * parameter. Any occurrences of {alias} will be replaced + * by the table alias. + * + * @param sql + * @param value + * @param type + * @return Criterion + */ + public static Criterion sqlRestriction(String sql, Object value, Type type) { + return new SQLCriterion(sql, new Object[] { value }, new Type[] { type } ); + } + /** + * Apply a constraint expressed in SQL. Any occurrences of {alias} + * will be replaced by the table alias. + * + * @param sql + * @return Criterion + */ + public static Criterion sqlRestriction(String sql) { + return new SQLCriterion(sql, ArrayHelper.EMPTY_OBJECT_ARRAY, ArrayHelper.EMPTY_TYPE_ARRAY); + } + + /** + * Group expressions together in a single conjunction (A and B and C...) + * + * @return Conjunction + */ + public static Conjunction conjunction() { + return new Conjunction(); + } + + /** + * Group expressions together in a single disjunction (A or B or C...) + * + * @return Conjunction + */ + public static Disjunction disjunction() { + return new Disjunction(); + } + + /** + * Apply an "equals" constraint to each property in the + * key set of a Map + * + * @param propertyNameValues a map from property names to values + * @return Criterion + */ + public static Criterion allEq(Map propertyNameValues) { + Conjunction conj = conjunction(); + Iterator iter = propertyNameValues.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry me = (Map.Entry) iter.next(); + conj.add( eq( (String) me.getKey(), me.getValue() ) ); + } + return conj; + } + + /** + * Constrain a collection valued property to be empty + */ + public static Criterion isEmpty(String propertyName) { + return new EmptyExpression(propertyName); + } + + /** + * Constrain a collection valued property to be non-empty + */ + public static Criterion isNotEmpty(String propertyName) { + return new NotEmptyExpression(propertyName); + } + + /** + * Constrain a collection valued property by size + */ + public static Criterion sizeEq(String propertyName, int size) { + return new SizeExpression(propertyName, size, "="); + } + + /** + * Constrain a collection valued property by size + */ + public static Criterion sizeNe(String propertyName, int size) { + return new SizeExpression(propertyName, size, "<>"); + } + + /** + * Constrain a collection valued property by size + */ + public static Criterion sizeGt(String propertyName, int size) { + return new SizeExpression(propertyName, size, "<"); + } + + /** + * Constrain a collection valued property by size + */ + public static Criterion sizeLt(String propertyName, int size) { + return new SizeExpression(propertyName, size, ">"); + } + + /** + * Constrain a collection valued property by size + */ + public static Criterion sizeGe(String propertyName, int size) { + return new SizeExpression(propertyName, size, "<="); + } + + /** + * Constrain a collection valued property by size + */ + public static Criterion sizeLe(String propertyName, int size) { + return new SizeExpression(propertyName, size, ">="); + } + + public static NaturalIdentifier 
naturalId() {
+		return new NaturalIdentifier();
+	}
+
+}
diff --git a/src/org/hibernate/criterion/RowCountProjection.java b/src/org/hibernate/criterion/RowCountProjection.java
new file mode 100755
index 0000000000..fa11d5878a
--- /dev/null
+++ b/src/org/hibernate/criterion/RowCountProjection.java
@@ -0,0 +1,35 @@
+//$Id$
+package org.hibernate.criterion;
+
+import org.hibernate.Criteria;
+import org.hibernate.Hibernate;
+import org.hibernate.HibernateException;
+import org.hibernate.type.Type;
+
+/**
+ * A row count
+ * @author Gavin King
+ */
+public class RowCountProjection extends SimpleProjection {
+
+	protected RowCountProjection() {}
+
+	public String toString() {
+		return "count(*)";
+	}
+
+	public Type[] getTypes(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		return new Type[] { Hibernate.INTEGER };
+	}
+
+	public String toSqlString(Criteria criteria, int position, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		return new StringBuffer()
+			.append("count(*) as y")
+			.append(position)
+			.append('_')
+			.toString();
+	}
+
+}
diff --git a/src/org/hibernate/criterion/SQLCriterion.java b/src/org/hibernate/criterion/SQLCriterion.java
new file mode 100644
index 0000000000..44f6b911a4
--- /dev/null
+++ b/src/org/hibernate/criterion/SQLCriterion.java
@@ -0,0 +1,45 @@
+//$Id$
+package org.hibernate.criterion;
+
+
+import org.hibernate.Criteria;
+import org.hibernate.EntityMode;
+import org.hibernate.HibernateException;
+import org.hibernate.engine.TypedValue;
+import org.hibernate.type.Type;
+import org.hibernate.util.StringHelper;
+
+/**
+ * A SQL fragment. The string {alias} will be replaced by the
+ * alias of the root entity.
+ */
+public class SQLCriterion implements Criterion {
+
+	private final String sql;
+	private final TypedValue[] typedValues;
+
+	public String toSqlString(
+		Criteria criteria,
+		CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		return StringHelper.replace( sql, "{alias}", criteriaQuery.getSQLAlias(criteria) );
+	}
+
+	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		return typedValues;
+	}
+
+	public String toString() {
+		return sql;
+	}
+
+	protected SQLCriterion(String sql, Object[] values, Type[] types) {
+		this.sql = sql;
+		typedValues = new TypedValue[values.length];
+		for ( int i=0; i<values.length; i++ ) {
+			typedValues[i] = new TypedValue( types[i], values[i], EntityMode.POJO );
+		}
+	}
+
+}
diff --git a/src/org/hibernate/criterion/SimpleExpression.java b/src/org/hibernate/criterion/SimpleExpression.java
--- /dev/null
+++ b/src/org/hibernate/criterion/SimpleExpression.java
+//$Id$
+package org.hibernate.criterion;
+
+import java.sql.Types;
+
+import org.hibernate.Criteria;
+import org.hibernate.HibernateException;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.engine.TypedValue;
+import org.hibernate.type.Type;
+
+/**
+ * superclass for "simple" comparisons (with SQL binary operators)
+ * @author Gavin King
+ */
+public class SimpleExpression implements Criterion {
+
+	private final String propertyName;
+	private final Object value;
+	private boolean ignoreCase;
+	private final String op;
+
+	protected SimpleExpression(String propertyName, Object value, String op) {
+		this.propertyName = propertyName;
+		this.value = value;
+		this.op = op;
+	}
+
+	public SimpleExpression ignoreCase() {
+		ignoreCase = true;
+		return this;
+	}
+
+	public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		String[] columns = criteriaQuery.getColumnsUsingProjection(criteria, propertyName);
+		Type type = criteriaQuery.getTypeUsingProjection(criteria, propertyName);
+		StringBuffer fragment = new StringBuffer();
+		if (columns.length>1) fragment.append('(');
+		SessionFactoryImplementor factory = criteriaQuery.getFactory();
+		int[] sqlTypes = type.sqlTypes( factory );
+		for ( int i=0; i<columns.length; i++ ) {
+			boolean lower = ignoreCase && ( sqlTypes[i]==Types.VARCHAR || sqlTypes[i]==Types.CHAR );
+			if (lower) {
+				fragment.append( factory.getDialect().getLowercaseFunction() )
+					.append('(');
+			}
+			fragment.append( columns[i] );
+			if (lower) fragment.append(')');
+			fragment.append( getOp() ).append("?");
+			if ( i<columns.length-1 ) fragment.append(" and ");
+		}
+		if (columns.length>1) fragment.append(')');
+		return fragment.toString();
+
+	}
+
+	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		Object icvalue = ignoreCase ?
value.toString().toLowerCase() : value; + return new TypedValue[] { criteriaQuery.getTypedValue(criteria, propertyName, icvalue) }; + } + + public String toString() { + return propertyName + getOp() + value; + } + + protected final String getOp() { + return op; + } + +} diff --git a/src/org/hibernate/criterion/SimpleProjection.java b/src/org/hibernate/criterion/SimpleProjection.java new file mode 100755 index 0000000000..c022d33be6 --- /dev/null +++ b/src/org/hibernate/criterion/SimpleProjection.java @@ -0,0 +1,45 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.type.Type; + + +/** + * A single-column projection that may be aliased + * @author Gavin King + */ +public abstract class SimpleProjection implements Projection { + + public Projection as(String alias) { + return Projections.alias(this, alias); + } + + public String[] getColumnAliases(String alias, int loc) { + return null; + } + + public Type[] getTypes(String alias, Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return null; + } + + public String[] getColumnAliases(int loc) { + return new String[] { "y" + loc + "_" }; + } + + public String[] getAliases() { + return new String[1]; + } + + public String toGroupSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + throw new UnsupportedOperationException("not a grouping projection"); + } + + public boolean isGrouped() { + return false; + } + +} diff --git a/src/org/hibernate/criterion/SimpleSubqueryExpression.java b/src/org/hibernate/criterion/SimpleSubqueryExpression.java new file mode 100755 index 0000000000..6df5b6c6d5 --- /dev/null +++ b/src/org/hibernate/criterion/SimpleSubqueryExpression.java @@ -0,0 +1,35 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.engine.TypedValue; + +/** + * A comparison between a constant value and the the result of a subquery + * @author Gavin King + */ +public class SimpleSubqueryExpression extends SubqueryExpression { + + private Object value; + + protected SimpleSubqueryExpression(Object value, String op, String quantifier, DetachedCriteria dc) { + super(op, quantifier, dc); + this.value = value; + } + + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + TypedValue[] superTv = super.getTypedValues(criteria, criteriaQuery); + TypedValue[] result = new TypedValue[superTv.length+1]; + System.arraycopy(superTv, 0, result, 1, superTv.length); + result[0] = new TypedValue( getTypes()[0], value, EntityMode.POJO ); + return result; + } + + protected String toLeftSqlString(Criteria criteria, CriteriaQuery criteriaQuery) { + return "?"; + } +} diff --git a/src/org/hibernate/criterion/SizeExpression.java b/src/org/hibernate/criterion/SizeExpression.java new file mode 100755 index 0000000000..bd54e596c0 --- /dev/null +++ b/src/org/hibernate/criterion/SizeExpression.java @@ -0,0 +1,63 @@ +//$Id$ +package org.hibernate.criterion; + + +import org.hibernate.Criteria; +import org.hibernate.EntityMode; +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.engine.TypedValue; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.Loadable; +import org.hibernate.sql.ConditionFragment; + +/** + * @author Gavin King + */ +public 
class SizeExpression implements Criterion { + + private final String propertyName; + private final int size; + private final String op; + + protected SizeExpression(String propertyName, int size, String op) { + this.propertyName = propertyName; + this.size = size; + this.op = op; + } + + public String toString() { + return propertyName + ".size" + op + size; + } + + public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + String role = criteriaQuery.getEntityName(criteria, propertyName) + + '.' + + criteriaQuery.getPropertyName(propertyName); + QueryableCollection cp = (QueryableCollection) criteriaQuery.getFactory() + .getCollectionPersister(role); + //String[] fk = StringHelper.qualify( "collection_", cp.getKeyColumnNames() ); + String[] fk = cp.getKeyColumnNames(); + String[] pk = ( (Loadable) cp.getOwnerEntityPersister() ).getIdentifierColumnNames(); //TODO: handle property-ref + return "? " + + op + + " (select count(*) from " + + cp.getTableName() + + //" collection_ where " + + " where " + + new ConditionFragment() + .setTableAlias( criteriaQuery.getSQLAlias(criteria, propertyName) ) + .setCondition(pk, fk) + .toFragmentString() + + ")"; + } + + public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + return new TypedValue[] { + new TypedValue( Hibernate.INTEGER, new Integer(size), EntityMode.POJO ) + }; + } + +} diff --git a/src/org/hibernate/criterion/Subqueries.java b/src/org/hibernate/criterion/Subqueries.java new file mode 100755 index 0000000000..20079e401f --- /dev/null +++ b/src/org/hibernate/criterion/Subqueries.java @@ -0,0 +1,160 @@ +//$Id$ +package org.hibernate.criterion; + +/** + * Factory class for criterion instances that represent expressions + * involving subqueries. 
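+ *
+ * For example (an illustrative sketch, assuming an open Session and a mapped
+ * Cat entity), a detached query can be used as a subselect:
+ *
+ *     DetachedCriteria avgWeight = DetachedCriteria.forClass(Cat.class)
+ *         .setProjection( Projections.avg("weight") );
+ *     List heavyCats = session.createCriteria(Cat.class)
+ *         .add( Subqueries.propertyGt("weight", avgWeight) )
+ *         .list();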
+ *
+ * @see Restrictions
+ * @see Projection
+ * @see org.hibernate.Criteria
+ * @author Gavin King
+ */
+public class Subqueries {
+
+	public static Criterion exists(DetachedCriteria dc) {
+		return new ExistsSubqueryExpression("exists", dc);
+	}
+
+	public static Criterion notExists(DetachedCriteria dc) {
+		return new ExistsSubqueryExpression("not exists", dc);
+	}
+
+	public static Criterion propertyEqAll(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "=", "all", dc);
+	}
+
+	public static Criterion propertyIn(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "in", null, dc);
+	}
+
+	public static Criterion propertyNotIn(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "not in", null, dc);
+	}
+
+	public static Criterion propertyEq(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "=", null, dc);
+	}
+
+	public static Criterion propertyNe(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "<>", null, dc);
+	}
+
+	public static Criterion propertyGt(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, ">", null, dc);
+	}
+
+	public static Criterion propertyLt(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "<", null, dc);
+	}
+
+	public static Criterion propertyGe(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, ">=", null, dc);
+	}
+
+	public static Criterion propertyLe(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "<=", null, dc);
+	}
+
+	public static Criterion propertyGtAll(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, ">", "all", dc);
+	}
+
+	public static Criterion propertyLtAll(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "<", "all", dc);
+	}
+
+	public static Criterion propertyGeAll(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, ">=", "all", dc);
+	}
+
+	public static Criterion propertyLeAll(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "<=", "all", dc);
+	}
+
+	public static Criterion propertyGtSome(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, ">", "some", dc);
+	}
+
+	public static Criterion propertyLtSome(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "<", "some", dc);
+	}
+
+	public static Criterion propertyGeSome(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, ">=", "some", dc);
+	}
+
+	public static Criterion propertyLeSome(String propertyName, DetachedCriteria dc) {
+		return new PropertySubqueryExpression(propertyName, "<=", "some", dc);
+	}
+
+	public static Criterion eqAll(Object value, DetachedCriteria dc) {
+		return new SimpleSubqueryExpression(value, "=", "all", dc);
+	}
+
+	public static Criterion in(Object value, DetachedCriteria dc) {
+		return new SimpleSubqueryExpression(value, "in", null, dc);
+	}
+
+	public static Criterion notIn(Object value, DetachedCriteria dc) {
+		return new SimpleSubqueryExpression(value, "not in", null, dc);
+	}
+
+	public static Criterion eq(Object value,
DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "=", null, dc); + } + + public static Criterion gt(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, ">", null, dc); + } + + public static Criterion lt(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "<", null, dc); + } + + public static Criterion ge(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, ">=", null, dc); + } + + public static Criterion le(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "<=", null, dc); + } + + public static Criterion ne(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "<>", null, dc); + } + + public static Criterion gtAll(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, ">", "all", dc); + } + + public static Criterion ltAll(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "<", "all", dc); + } + + public static Criterion geAll(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, ">=", "all", dc); + } + + public static Criterion leAll(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "<=", "all", dc); + } + + public static Criterion gtSome(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, ">", "some", dc); + } + + public static Criterion ltSome(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "<", "some", dc); + } + + public static Criterion geSome(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, ">=", "some", dc); + } + + public static Criterion leSome(Object value, DetachedCriteria dc) { + return new SimpleSubqueryExpression(value, "<=", "some", dc); + } + + +} diff --git a/src/org/hibernate/criterion/SubqueryExpression.java b/src/org/hibernate/criterion/SubqueryExpression.java new file mode 100755 index 0000000000..b78fee6b4c --- /dev/null +++ b/src/org/hibernate/criterion/SubqueryExpression.java @@ -0,0 +1,89 @@ +//$Id$ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.TypedValue; +import org.hibernate.impl.CriteriaImpl; +import org.hibernate.loader.criteria.CriteriaQueryTranslator; +import org.hibernate.persister.entity.OuterJoinLoadable; +import org.hibernate.sql.Select; +import org.hibernate.type.Type; + +/** + * @author Gavin King + */ +public abstract class SubqueryExpression implements Criterion { + + private CriteriaImpl criteriaImpl; + private String quantifier; + private String op; + private QueryParameters params; + private Type[] types; + + protected Type[] getTypes() { + return types; + } + + protected SubqueryExpression(String op, String quantifier, DetachedCriteria dc) { + this.criteriaImpl = dc.getCriteriaImpl(); + this.quantifier = quantifier; + this.op = op; + } + + protected abstract String toLeftSqlString(Criteria criteria, CriteriaQuery outerQuery); + + public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) + throws HibernateException { + + final SessionImplementor session = ( (CriteriaImpl) criteria ).getSession(); //ugly! 
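+		// (The cast above assumes the root Criteria is always a CriteriaImpl,
+		// so this criterion cannot be rendered against an arbitrary Criteria
+		// implementation.)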
+		final SessionFactoryImplementor factory = session.getFactory();
+
+		final OuterJoinLoadable persister = (OuterJoinLoadable) factory.getEntityPersister( criteriaImpl.getEntityOrClassName() );
+		CriteriaQueryTranslator innerQuery = new CriteriaQueryTranslator(
+				factory,
+				criteriaImpl,
+				criteriaImpl.getEntityOrClassName(), //implicit polymorphism not supported (would need a union)
+				criteriaQuery.generateSQLAlias(),
+				criteriaQuery
+			);
+
+		params = innerQuery.getQueryParameters(); //TODO: bad lifecycle....
+		types = innerQuery.getProjectedTypes();
+
+		//String filter = persister.filterFragment( innerQuery.getRootSQLALias(), session.getEnabledFilters() );
+
+		String sql = new Select( factory.getDialect() )
+			.setWhereClause( innerQuery.getWhereCondition() )
+			.setGroupByClause( innerQuery.getGroupBy() )
+			.setSelectClause( innerQuery.getSelect() )
+			.setFromClause(
+					persister.fromTableFragment( innerQuery.getRootSQLALias() ) +
+					persister.fromJoinFragment( innerQuery.getRootSQLALias(), true, false )
+				)
+			.toStatementString();
+
+		final StringBuffer buf = new StringBuffer()
+			.append( toLeftSqlString(criteria, criteriaQuery) );
+		if (op!=null) buf.append(' ').append(op).append(' ');
+		if (quantifier!=null) buf.append(quantifier).append(' ');
+		return buf.append('(').append(sql).append(')')
+			.toString();
+	}
+
+	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery)
+	throws HibernateException {
+		Type[] types = params.getPositionalParameterTypes();
+		Object[] values = params.getPositionalParameterValues();
+		TypedValue[] tv = new TypedValue[types.length];
+		for ( int i=0; i<types.length; i++ ) {
+			tv[i] = new TypedValue( types[i], values[i], EntityMode.POJO );
+		}
+		return tv;
+	}
+
+}
diff --git a/src/org/hibernate/criterion/package.html b/src/org/hibernate/criterion/package.html
--- /dev/null
+++ b/src/org/hibernate/criterion/package.html
+<html>

+<head></head>
+<body>
+<p>
+    A framework for defining restriction criteria and order criteria.
+</p>
+</body>
+</html>
diff --git a/src/org/hibernate/dialect/Cache71Dialect.java b/src/org/hibernate/dialect/Cache71Dialect.java
new file mode 100644
index 0000000000..25cd94f26c
--- /dev/null
+++ b/src/org/hibernate/dialect/Cache71Dialect.java
@@ -0,0 +1,655 @@
+//$Id: $
+package org.hibernate.dialect;
+
+import java.sql.CallableStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+
+import org.hibernate.Hibernate;
+import org.hibernate.LockMode;
+import org.hibernate.MappingException;
+import org.hibernate.cfg.Environment;
+import org.hibernate.dialect.function.NoArgSQLFunction;
+import org.hibernate.dialect.function.NvlFunction;
+import org.hibernate.dialect.function.SQLFunctionTemplate;
+import org.hibernate.dialect.function.StandardSQLFunction;
+import org.hibernate.dialect.function.VarArgsSQLFunction;
+import org.hibernate.dialect.function.StandardJDBCEscapeFunction;
+import org.hibernate.dialect.function.ConvertFunction;
+import org.hibernate.dialect.function.ConditionalParenthesisFunction;
+import org.hibernate.dialect.lock.LockingStrategy;
+import org.hibernate.dialect.lock.SelectLockingStrategy;
+import org.hibernate.dialect.lock.UpdateLockingStrategy;
+import org.hibernate.exception.CacheSQLStateConverter;
+import org.hibernate.exception.SQLExceptionConverter;
+import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter;
+import org.hibernate.exception.ViolatedConstraintNameExtracter;
+import org.hibernate.id.IdentityGenerator;
+import org.hibernate.persister.entity.Lockable;
+import org.hibernate.sql.CacheJoinFragment;
+import org.hibernate.sql.JoinFragment;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Caché 2007.1 dialect. This class is required in order to use Hibernate with Intersystems Caché SQL.
    + *
    + * Compatible with Caché 2007.1. + *
+ *
+ * CACHÉ AND HIBERNATE
+ *
+ * PREREQUISITES
+ *
+ * These setup instructions assume that both Caché and Hibernate are installed and operational.
+ *
+ * HIBERNATE DIRECTORIES AND FILES
+ *
+ * JBoss distributes the InterSystems Caché dialect for Hibernate 3.2.1.
+ * For earlier versions of Hibernate, please contact the
+ * InterSystems Worldwide Response Center (WRC) for the appropriate source files.
+ *
+ * CACHÉ DOCUMENTATION
+ *
+ * Documentation for Caché is available online when Caché is running.
+ * It can also be obtained from the InterSystems website.
+ * The book "Object-oriented Application Development Using the Caché Post-relational Database"
+ * is also available from Springer-Verlag.
+ *
+ * HIBERNATE DOCUMENTATION
+ *
+ * Hibernate comes with extensive electronic documentation.
+ * In addition, several books on Hibernate are available from Manning Publications Co.
+ * Three available titles are "Hibernate Quickly", "Hibernate in Action", and
+ * "Java Persistence with Hibernate".
+ *
+ * TO SET UP HIBERNATE FOR USE WITH CACHÉ
+ *
+ * The following steps assume that the directory where Caché was installed is C:\CacheSys.
+ * This is the default installation directory for Caché.
+ * The default installation directory for Hibernate is assumed to be C:\Hibernate.
+ *
+ * If either product is installed in a different location, the pathnames that follow
+ * should be modified appropriately.
+ *
+ * Caché version 2007.1 and above is recommended for use with Hibernate.
+ * The location of CacheDB.jar depends on your version of Caché.
+ *
+ *   1. Copy C:\CacheSys\dev\java\lib\JDK15\CacheDB.jar to C:\Hibernate\lib\CacheDB.jar.
+ *   2. Insert the following files into your Java classpath:
+ *      - all jar files in the directory C:\Hibernate\lib
+ *      - the directory (or directories) where hibernate.properties and/or
+ *        hibernate.cfg.xml are kept
+ *   3. In the file hibernate.properties (or hibernate.cfg.xml), specify the Caché
+ *      dialect and the Caché version URL settings.
+ *
+ * For example, in Hibernate 3.2, typical entries in hibernate.properties would have
+ * the following "name=value" pairs:
+ *
+ *     Property Name                        Property Value
+ *     hibernate.dialect                    org.hibernate.dialect.Cache71Dialect
+ *     hibernate.connection.driver_class    com.intersys.jdbc.CacheDriver
+ *     hibernate.connection.username        (see note 1)
+ *     hibernate.connection.password        (see note 1)
+ *     hibernate.connection.url             jdbc:Cache://127.0.0.1:1972/USER
+ *
+ * Note 1: Please contact your administrator for the userid and password you should use
+ * when attempting access via JDBC. By default, these are chosen to be "_SYSTEM" and
+ * "SYS" respectively as noted in the SQL standard.
+ *
+ * CACHÉ VERSION URL
+ *
+ * This is the standard URL for the JDBC driver.
+ * For a JDBC driver on the machine hosting Caché, use the IP "loopback" address, 127.0.0.1.
+ * For 1972, the default port, specify the super server port of your Caché instance.
+ * For USER, substitute the NAMESPACE which contains your Caché database data.
+ *
+ * CACHÉ DIALECTS
+ *
+ * Choices for Dialect are:
+ *
+ *   1. org.hibernate.dialect.Cache71Dialect (requires Caché 2007.1 or above)
+ *
+ * SUPPORT FOR IDENTITY COLUMNS
+ *
+ * Caché 2007.1 or later supports identity columns. For Hibernate to use identity
+ * columns, specify "native" as the generator.
+ *
+ * SEQUENCE DIALECTS SUPPORT SEQUENCES
+ *
+ * To use Hibernate sequence support with Caché in a namespace, you must FIRST load
+ * the following file into that namespace:
+ *
+ *     etc\CacheSequences.xml
+ *
+ * For example, at the COS terminal prompt in the namespace, run the following command:
+ *
+ *     d LoadFile^%apiOBJ("c:\hibernate\etc\CacheSequences.xml","ck")
+ *
+ * In your Hibernate mapping you can specify sequence use.
+ *
+ * For example, the following shows the use of a sequence generator in a Hibernate mapping:
+ *
+ *     <id name="id" column="uid" type="long" unsaved-value="null">
+ *         <generator class="sequence"/>
+ *     </id>
+ *
+ * Some versions of Hibernate under some circumstances call getSelectSequenceNextValString()
+ * in the dialect. If this happens you will receive the error message:
+ * new MappingException( "Dialect does not support sequences" ).
+ *
+ * HIBERNATE FILES ASSOCIATED WITH CACHÉ DIALECT
+ *
+ * The following files are associated with the Caché dialect:
+ *
+ *   1. src\org\hibernate\dialect\Cache71Dialect.java
+ *   2. src\org\hibernate\dialect\function\ConditionalParenthesisFunction.java
+ *   3. src\org\hibernate\dialect\function\ConvertFunction.java
+ *   4. src\org\hibernate\exception\CacheSQLStateConverter.java
+ *   5. src\org\hibernate\sql\CacheJoinFragment.java
+ *
    + * Cache71Dialect ships with Hibernate 3.2. All other dialects are distributed by InterSystems and subclass Cache71Dialect. + * + * @author Jonathan Levinson + */ + +public class Cache71Dialect extends Dialect { + + /** + * Creates new Caché71Dialect instance. Sets up the JDBC / + * Caché type mappings. + */ + public Cache71Dialect() { + super(); + commonRegistration(); + register71Functions(); + } + + protected final void commonRegistration() { + // Note: For object <-> SQL datatype mappings see: + // Configuration Manager | Advanced | SQL | System DDL Datatype Mappings + // + // TBD registerColumnType(Types.BINARY, "binary($1)"); + // changed 08-11-2005, jsl + registerColumnType( Types.BINARY, "varbinary($1)" ); + registerColumnType( Types.BIGINT, "BigInt" ); + registerColumnType( Types.BIT, "bit" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.DECIMAL, "decimal" ); + registerColumnType( Types.DOUBLE, "double" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.LONGVARBINARY, "longvarbinary" ); // binary %Stream + registerColumnType( Types.LONGVARCHAR, "longvarchar" ); // character %Stream + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + registerColumnType( Types.REAL, "real" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TINYINT, "tinyint" ); + // TBD should this be varbinary($1)? + // registerColumnType(Types.VARBINARY, "binary($1)"); + registerColumnType( Types.VARBINARY, "longvarbinary" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.BLOB, "longvarbinary" ); + registerColumnType( Types.CLOB, "longvarchar" ); + + getDefaultProperties().setProperty( Environment.USE_STREAMS_FOR_BINARY, "false" ); + getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); + //getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, NO_BATCH); + + getDefaultProperties().setProperty( Environment.USE_SQL_COMMENTS, "false" ); + + registerFunction( "abs", new StandardSQLFunction( "abs" ) ); + registerFunction( "acos", new StandardJDBCEscapeFunction( "acos", Hibernate.DOUBLE ) ); + registerFunction( "%alphaup", new StandardSQLFunction( "%alphaup", Hibernate.STRING ) ); + registerFunction( "ascii", new StandardSQLFunction( "ascii", Hibernate.STRING ) ); + registerFunction( "asin", new StandardJDBCEscapeFunction( "asin", Hibernate.DOUBLE ) ); + registerFunction( "atan", new StandardJDBCEscapeFunction( "atan", Hibernate.DOUBLE ) ); + registerFunction( "bit_length", new SQLFunctionTemplate( Hibernate.INTEGER, "($length(?1)*8)" ) ); + // hibernate impelemnts cast in Dialect.java + registerFunction( "ceiling", new StandardSQLFunction( "ceiling", Hibernate.INTEGER ) ); + registerFunction( "char", new StandardJDBCEscapeFunction( "char", Hibernate.CHARACTER ) ); + registerFunction( "character_length", new StandardSQLFunction( "character_length", Hibernate.INTEGER ) ); + registerFunction( "char_length", new StandardSQLFunction( "char_length", Hibernate.INTEGER ) ); + registerFunction( "cos", new StandardJDBCEscapeFunction( "cos", Hibernate.DOUBLE ) ); + registerFunction( "cot", new StandardJDBCEscapeFunction( "cot", Hibernate.DOUBLE ) ); + registerFunction( "coalesce", new VarArgsSQLFunction( "coalesce(", ",", ")" ) ); + registerFunction( 
"concat", new VarArgsSQLFunction( Hibernate.STRING, "", "||", "" ) ); + registerFunction( "convert", new ConvertFunction() ); + registerFunction( "curdate", new StandardJDBCEscapeFunction( "curdate", Hibernate.DATE ) ); + registerFunction( "current_date", new NoArgSQLFunction( "current_date", Hibernate.DATE, false ) ); + registerFunction( "current_time", new NoArgSQLFunction( "current_time", Hibernate.TIME, false ) ); + registerFunction( + "current_timestamp", new ConditionalParenthesisFunction( "current_timestamp", Hibernate.TIMESTAMP ) + ); + registerFunction( "curtime", new StandardJDBCEscapeFunction( "curtime", Hibernate.TIME ) ); + registerFunction( "database", new StandardJDBCEscapeFunction( "database", Hibernate.STRING ) ); + registerFunction( "dateadd", new VarArgsSQLFunction( Hibernate.TIMESTAMP, "dateadd(", ",", ")" ) ); + registerFunction( "datediff", new VarArgsSQLFunction( Hibernate.INTEGER, "datediff(", ",", ")" ) ); + registerFunction( "datename", new VarArgsSQLFunction( Hibernate.STRING, "datename(", ",", ")" ) ); + registerFunction( "datepart", new VarArgsSQLFunction( Hibernate.INTEGER, "datepart(", ",", ")" ) ); + registerFunction( "day", new StandardSQLFunction( "day", Hibernate.INTEGER ) ); + registerFunction( "dayname", new StandardJDBCEscapeFunction( "dayname", Hibernate.STRING ) ); + registerFunction( "dayofmonth", new StandardJDBCEscapeFunction( "dayofmonth", Hibernate.INTEGER ) ); + registerFunction( "dayofweek", new StandardJDBCEscapeFunction( "dayofweek", Hibernate.INTEGER ) ); + registerFunction( "dayofyear", new StandardJDBCEscapeFunction( "dayofyear", Hibernate.INTEGER ) ); + // is it necessary to register %exact since it can only appear in a where clause? + registerFunction( "%exact", new StandardSQLFunction( "%exact", Hibernate.STRING ) ); + registerFunction( "exp", new StandardJDBCEscapeFunction( "exp", Hibernate.DOUBLE ) ); + registerFunction( "%external", new StandardSQLFunction( "%external", Hibernate.STRING ) ); + registerFunction( "$extract", new VarArgsSQLFunction( Hibernate.INTEGER, "$extract(", ",", ")" ) ); + registerFunction( "$find", new VarArgsSQLFunction( Hibernate.INTEGER, "$find(", ",", ")" ) ); + registerFunction( "floor", new StandardSQLFunction( "floor", Hibernate.INTEGER ) ); + registerFunction( "getdate", new StandardSQLFunction( "getdate", Hibernate.TIMESTAMP ) ); + registerFunction( "hour", new StandardJDBCEscapeFunction( "hour", Hibernate.INTEGER ) ); + registerFunction( "ifnull", new VarArgsSQLFunction( "ifnull(", ",", ")" ) ); + registerFunction( "%internal", new StandardSQLFunction( "%internal" ) ); + registerFunction( "isnull", new VarArgsSQLFunction( "isnull(", ",", ")" ) ); + registerFunction( "isnumeric", new StandardSQLFunction( "isnumeric", Hibernate.INTEGER ) ); + registerFunction( "lcase", new StandardJDBCEscapeFunction( "lcase", Hibernate.STRING ) ); + registerFunction( "left", new StandardJDBCEscapeFunction( "left", Hibernate.STRING ) ); + registerFunction( "len", new StandardSQLFunction( "len", Hibernate.INTEGER ) ); + registerFunction( "length", new StandardSQLFunction( "length", Hibernate.INTEGER ) ); + registerFunction( "$length", new VarArgsSQLFunction( "$length(", ",", ")" ) ); + // aggregate functions shouldn't be registered, right? 
+ //registerFunction( "list", new StandardSQLFunction("list",Hibernate.STRING) ); + // stopped on $list + registerFunction( "$list", new VarArgsSQLFunction( "$list(", ",", ")" ) ); + registerFunction( "$listdata", new VarArgsSQLFunction( "$listdata(", ",", ")" ) ); + registerFunction( "$listfind", new VarArgsSQLFunction( "$listfind(", ",", ")" ) ); + registerFunction( "$listget", new VarArgsSQLFunction( "$listget(", ",", ")" ) ); + registerFunction( "$listlength", new StandardSQLFunction( "$listlength", Hibernate.INTEGER ) ); + registerFunction( "locate", new StandardSQLFunction( "$FIND", Hibernate.INTEGER ) ); + registerFunction( "log", new StandardJDBCEscapeFunction( "log", Hibernate.DOUBLE ) ); + registerFunction( "log10", new StandardJDBCEscapeFunction( "log", Hibernate.DOUBLE ) ); + registerFunction( "lower", new StandardSQLFunction( "lower" ) ); + registerFunction( "ltrim", new StandardSQLFunction( "ltrim" ) ); + registerFunction( "minute", new StandardJDBCEscapeFunction( "minute", Hibernate.INTEGER ) ); + registerFunction( "mod", new StandardJDBCEscapeFunction( "mod", Hibernate.DOUBLE ) ); + registerFunction( "month", new StandardJDBCEscapeFunction( "month", Hibernate.INTEGER ) ); + registerFunction( "monthname", new StandardJDBCEscapeFunction( "monthname", Hibernate.STRING ) ); + registerFunction( "now", new StandardJDBCEscapeFunction( "monthname", Hibernate.TIMESTAMP ) ); + registerFunction( "nullif", new VarArgsSQLFunction( "nullif(", ",", ")" ) ); + registerFunction( "nvl", new NvlFunction() ); + registerFunction( "%odbcin", new StandardSQLFunction( "%odbcin" ) ); + registerFunction( "%odbcout", new StandardSQLFunction( "%odbcin" ) ); + registerFunction( "%pattern", new VarArgsSQLFunction( Hibernate.STRING, "", "%pattern", "" ) ); + registerFunction( "pi", new StandardJDBCEscapeFunction( "pi", Hibernate.DOUBLE ) ); + registerFunction( "$piece", new VarArgsSQLFunction( Hibernate.STRING, "$piece(", ",", ")" ) ); + registerFunction( "position", new VarArgsSQLFunction( Hibernate.INTEGER, "position(", " in ", ")" ) ); + registerFunction( "power", new VarArgsSQLFunction( Hibernate.STRING, "power(", ",", ")" ) ); + registerFunction( "quarter", new StandardJDBCEscapeFunction( "quarter", Hibernate.INTEGER ) ); + registerFunction( "repeat", new VarArgsSQLFunction( Hibernate.STRING, "repeat(", ",", ")" ) ); + registerFunction( "replicate", new VarArgsSQLFunction( Hibernate.STRING, "replicate(", ",", ")" ) ); + registerFunction( "right", new StandardJDBCEscapeFunction( "right", Hibernate.STRING ) ); + registerFunction( "round", new VarArgsSQLFunction( Hibernate.FLOAT, "round(", ",", ")" ) ); + registerFunction( "rtrim", new StandardSQLFunction( "rtrim", Hibernate.STRING ) ); + registerFunction( "second", new StandardJDBCEscapeFunction( "second", Hibernate.INTEGER ) ); + registerFunction( "sign", new StandardSQLFunction( "sign", Hibernate.INTEGER ) ); + registerFunction( "sin", new StandardJDBCEscapeFunction( "sin", Hibernate.DOUBLE ) ); + registerFunction( "space", new StandardSQLFunction( "space", Hibernate.STRING ) ); + registerFunction( "%sqlstring", new VarArgsSQLFunction( Hibernate.STRING, "%sqlstring(", ",", ")" ) ); + registerFunction( "%sqlupper", new VarArgsSQLFunction( Hibernate.STRING, "%sqlupper(", ",", ")" ) ); + registerFunction( "sqrt", new StandardJDBCEscapeFunction( "SQRT", Hibernate.DOUBLE ) ); + registerFunction( "%startswith", new VarArgsSQLFunction( Hibernate.STRING, "", "%startswith", "" ) ); + // below is for Cache' that don't have str in 2007.1 there is str and we 
registerFunction( "str", new SQLFunctionTemplate( Hibernate.STRING, "cast(?1 as char varying)" ) ); + registerFunction( "string", new VarArgsSQLFunction( Hibernate.STRING, "string(", ",", ")" ) ); + // note that %string is deprecated + registerFunction( "%string", new VarArgsSQLFunction( Hibernate.STRING, "%string(", ",", ")" ) ); + registerFunction( "substr", new VarArgsSQLFunction( Hibernate.STRING, "substr(", ",", ")" ) ); + registerFunction( "substring", new VarArgsSQLFunction( Hibernate.STRING, "substring(", ",", ")" ) ); + registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", Hibernate.TIMESTAMP, false ) ); + registerFunction( "tan", new StandardJDBCEscapeFunction( "tan", Hibernate.DOUBLE ) ); + registerFunction( "timestampadd", new StandardJDBCEscapeFunction( "timestampadd", Hibernate.DOUBLE ) ); + registerFunction( "timestampdiff", new StandardJDBCEscapeFunction( "timestampdiff", Hibernate.DOUBLE ) ); + registerFunction( "tochar", new VarArgsSQLFunction( Hibernate.STRING, "tochar(", ",", ")" ) ); + registerFunction( "to_char", new VarArgsSQLFunction( Hibernate.STRING, "to_char(", ",", ")" ) ); + registerFunction( "todate", new VarArgsSQLFunction( Hibernate.STRING, "todate(", ",", ")" ) ); + registerFunction( "to_date", new VarArgsSQLFunction( Hibernate.STRING, "todate(", ",", ")" ) ); + registerFunction( "tonumber", new StandardSQLFunction( "tonumber" ) ); + registerFunction( "to_number", new StandardSQLFunction( "tonumber" ) ); + // TRIM(end_keyword string-expression-1 FROM string-expression-2) + // use the generic Hibernate implementation; "From" is one of the parameters it passes in position ?3 + //registerFunction( "trim", new SQLFunctionTemplate(Hibernate.STRING, "trim(?1 ?2 from ?3)") ); + registerFunction( "truncate", new StandardJDBCEscapeFunction( "truncate", Hibernate.STRING ) ); + registerFunction( "ucase", new StandardJDBCEscapeFunction( "ucase", Hibernate.STRING ) ); + registerFunction( "upper", new StandardSQLFunction( "upper" ) ); + // %upper is deprecated + registerFunction( "%upper", new StandardSQLFunction( "%upper" ) ); + registerFunction( "user", new StandardJDBCEscapeFunction( "user", Hibernate.STRING ) ); + registerFunction( "week", new StandardJDBCEscapeFunction( "week", Hibernate.INTEGER ) ); + registerFunction( "xmlconcat", new VarArgsSQLFunction( Hibernate.STRING, "xmlconcat(", ",", ")" ) ); + registerFunction( "xmlelement", new VarArgsSQLFunction( Hibernate.STRING, "xmlelement(", ",", ")" ) ); + // xmlforest requires a new kind of function constructor + registerFunction( "year", new StandardJDBCEscapeFunction( "year", Hibernate.INTEGER ) ); + } + + protected final void register71Functions() { + this.registerFunction( "str", new VarArgsSQLFunction( Hibernate.STRING, "str(", ",", ")" ) ); + } + + // DDL support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean hasAlterTable() { + // Does this dialect support the ALTER TABLE syntax? + return true; + } + + public boolean qualifyIndexName() { + // Do we need to qualify index names with the schema name? + return false; + } + + public boolean supportsUnique() { + // Does this dialect support the UNIQUE column syntax? + return true; + } + + /** + * The syntax used to add a foreign key constraint to a table.
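+	 * For example, for a constraint named <tt>FK1</tt> on column <tt>C1</tt> referencing <tt>T2 (ID)</tt> (illustrative names, not from the mapping), the implementation below yields: <tt> ADD CONSTRAINT FK1 FOREIGN KEY FK1 (C1) REFERENCES T2 (ID) </tt>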
+ * + * @return String + */ + public String getAddForeignKeyConstraintString( + String constraintName, + String[] foreignKey, + String referencedTable, + String[] primaryKey, + boolean referencesPrimaryKey) { + // The syntax used to add a foreign key constraint to a table. + return new StringBuffer( 300 ) + .append( " ADD CONSTRAINT " ) + .append( constraintName ) + .append( " FOREIGN KEY " ) + .append( constraintName ) + .append( " (" ) + .append( StringHelper.join( ", ", foreignKey ) ) // identifier-commalist + .append( ") REFERENCES " ) + .append( referencedTable ) + .append( " (" ) + .append( StringHelper.join( ", ", primaryKey ) ) // identifier-commalist + .append( ") " ) + .toString(); + } + + public boolean supportsCheck() { + // Does this dialect support check constraints? + return false; + } + + public String getAddColumnString() { + // The syntax used to add a column to a table + return " add column"; + } + + public String getCascadeConstraintsString() { + // Completely optional cascading drop clause. + return ""; + } + + public boolean dropConstraints() { + // Do we need to drop constraints before dropping tables in this dialect? + return true; + } + + public boolean supportsCascadeDelete() { + return true; + } + + public boolean hasSelfReferentialForeignKeyBug() { + return true; + } + + // temporary table support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsTemporaryTables() { + return true; + } + + public String generateTemporaryTableName(String baseTableName) { + String name = super.generateTemporaryTableName( baseTableName ); + return name.length() > 25 ? name.substring( 1, 25 ) : name; + } + + public String getCreateTemporaryTableString() { + return "create global temporary table"; + } + + public Boolean performTemporaryTableDDLInIsolation() { + return Boolean.FALSE; + } + + public String getCreateTemporaryTablePostfix() { + return ""; + } + + public boolean dropTemporaryTableAfterUse() { + return true; + } + + // IDENTITY support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsIdentityColumns() { + return true; + } + + public Class getNativeIdentifierGeneratorClass() { + return IdentityGenerator.class; + } + + public boolean hasDataTypeInIdentityColumn() { + // Whether this dialect has an Identity clause added to the data type or a completely seperate identity + // data type + return true; + } + + public String getIdentityColumnString() throws MappingException { + // The keyword used to specify an identity column, if identity column key generation is supported. + return "identity"; + } + + public String getIdentitySelectString() { + return "SELECT LAST_IDENTITY() FROM %TSQL_sys.snf"; + } + + // SEQUENCE support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsSequences() { + return false; + } + +// It really does support sequences, but InterSystems elects to suggest usage of IDENTITY instead :/ +// Anyway, below are the actual support overrides for users wanting to use this combo... 
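+// (Note: these overrides assume the InterSystems.Sequences table and its Sequences_GetNext procedure, referenced below, exist in the target namespace.)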
+// +// public String getSequenceNextValString(String sequenceName) { +// return "select InterSystems.Sequences_GetNext('" + sequenceName + "') from InterSystems.Sequences where ucase(name)=ucase('" + sequenceName + "')"; +// } +// +// public String getSelectSequenceNextValString(String sequenceName) { +// return "(select InterSystems.Sequences_GetNext('" + sequenceName + "') from InterSystems.Sequences where ucase(name)=ucase('" + sequenceName + "'))"; +// } +// +// public String getCreateSequenceString(String sequenceName) { +// return "insert into InterSystems.Sequences(Name) values (ucase('" + sequenceName + "'))"; +// } +// +// public String getDropSequenceString(String sequenceName) { +// return "delete from InterSystems.Sequences where ucase(name)=ucase('" + sequenceName + "')"; +// } +// +// public String getQuerySequencesString() { +// return "select name from InterSystems.Sequences"; +// } + + // lock acquisition support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsForUpdate() { + // Does this dialect support the FOR UPDATE syntax? + return false; + } + + public boolean supportsForUpdateOf() { + // Does this dialect support FOR UPDATE OF, allowing particular rows to be locked? + return false; + } + + public boolean supportsForUpdateNowait() { + // Does this dialect support the Oracle-style FOR UPDATE NOWAIT syntax? + return false; + } + + public boolean supportsOuterJoinForUpdate() { + return false; + } + + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + // InterSystems Cache' does not currently support "SELECT ... FOR UPDATE" syntax... + // Set your transaction mode to READ_COMMITTED before using + if ( lockMode.greaterThan( LockMode.READ ) ) { + return new UpdateLockingStrategy( lockable, lockMode ); + } + else { + return new SelectLockingStrategy( lockable, lockMode ); + } + } + + // LIMIT support (ala TOP) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsLimit() { + return true; + } + + public boolean supportsLimitOffset() { + return false; + } + + public boolean supportsVariableLimit() { + return true; + } + + public boolean bindLimitParametersFirst() { + // Does the LIMIT clause come at the start of the SELECT statement, rather than at the end? + return true; + } + + public boolean useMaxForLimit() { + // Does the LIMIT clause take a "maximum" row number instead of a total number of returned rows? + return true; + } + + public String getLimitString(String sql, boolean hasOffset) { + if ( hasOffset ) { + throw new UnsupportedOperationException( "An offset may not be specified in Cache SQL" ); + } + + // This does not support the Cache SQL 'DISTINCT BY (comma-list)' extensions, + // but this extension is not supported through Hibernate anyway. + int insertionPoint = sql.startsWith( "select distinct" ) ? 15 : 6; + + return new StringBuffer( sql.length() + 8 ) + .append( sql ) + .insert( insertionPoint, " TOP ? 
" ) + .toString(); + } + + // callable statement support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public int registerResultSetOutParameter(CallableStatement statement, int col) throws SQLException { + return col; + } + + public ResultSet getResultSet(CallableStatement ps) throws SQLException { + ps.execute(); + return ( ResultSet ) ps.getObject( 1 ); + } + + // miscellaneous support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public String getLowercaseFunction() { + // The name of the SQL function that transforms a string to lowercase + return "lower"; + } + + public String getNullColumnString() { + // The keyword used to specify a nullable column. + return " null"; + } + + public JoinFragment createOuterJoinFragment() { + // Create an OuterJoinGenerator for this dialect. + return new CacheJoinFragment(); + } + + public String getNoColumnsInsertString() { + // The keyword used to insert a row without specifying + // any column values + return " default values"; + } + + public SQLExceptionConverter buildSQLExceptionConverter() { + return new CacheSQLStateConverter( EXTRACTER ); + } + + public static final ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() { + /** + * Extract the name of the violated constraint from the given SQLException. + * + * @param sqle The exception that was the result of the constraint violation. + * @return The extracted constraint name. + */ + public String extractConstraintName(SQLException sqle) { + return extractUsingTemplate( "constraint (", ") violated", sqle.getMessage() ); + } + }; + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } + + public boolean areStringComparisonsCaseInsensitive() { + return true; + } + + public boolean supportsResultSetPositionQueryMethodsOnForwardOnlyCursor() { + return false; + } +} diff --git a/src/org/hibernate/dialect/DB2390Dialect.java b/src/org/hibernate/dialect/DB2390Dialect.java new file mode 100644 index 0000000000..e6974893a5 --- /dev/null +++ b/src/org/hibernate/dialect/DB2390Dialect.java @@ -0,0 +1,45 @@ +//$Id$ +package org.hibernate.dialect; + +/** + * An SQL dialect for DB2/390. This class provides support for + * DB2 Universal Database for OS/390, also known as DB2/390. + * + * @author Kristoffer Dyrkorn + */ +public class DB2390Dialect extends DB2Dialect { + + public boolean supportsSequences() { + return false; + } + + public String getIdentitySelectString() { + return "select identity_val_local() from sysibm.sysdummy1"; + } + + public boolean supportsLimit() { + return true; + } + + public boolean supportsLimitOffset() { + return false; + } + + public String getLimitString(String sql, int offset, int limit) { + return new StringBuffer(sql.length() + 40) + .append(sql) + .append(" fetch first ") + .append(limit) + .append(" rows only ") + .toString(); + } + + public boolean useMaxForLimit() { + return true; + } + + public boolean supportsVariableLimit() { + return false; + } + +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/DB2400Dialect.java b/src/org/hibernate/dialect/DB2400Dialect.java new file mode 100644 index 0000000000..c0ecac98d5 --- /dev/null +++ b/src/org/hibernate/dialect/DB2400Dialect.java @@ -0,0 +1,44 @@ +package org.hibernate.dialect; + +/** +* An SQL dialect for DB2/400 +* @author Peter DeGregorio (pdegregorio) +* This class provides support for DB2 Universal Database for iSeries, +* also known as DB2/400. 
+*/ +public class DB2400Dialect extends DB2Dialect { + + public boolean supportsSequences() { + return false; + } + + public String getIdentitySelectString() { + return "select identity_val_local() from sysibm.sysdummy1"; + } + + public boolean supportsLimit() { + return true; + } + + public boolean supportsLimitOffset() { + return false; + } + + public String getLimitString(String sql, int offset, int limit) { + return new StringBuffer(sql.length() + 40) + .append(sql) + .append(" fetch first ") + .append(limit) + .append(" rows only ") + .toString(); + } + + public boolean useMaxForLimit() { + return true; + } + + public boolean supportsVariableLimit() { + return false; + } + +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/DB2Dialect.java b/src/org/hibernate/dialect/DB2Dialect.java new file mode 100644 index 0000000000..479a39a797 --- /dev/null +++ b/src/org/hibernate/dialect/DB2Dialect.java @@ -0,0 +1,369 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.CallableStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.dialect.function.AnsiTrimEmulationFunction; + +/** + * An SQL dialect for DB2. + * @author Gavin King + */ +public class DB2Dialect extends Dialect { + + public DB2Dialect() { + super(); + registerColumnType( Types.BIT, "smallint" ); + registerColumnType( Types.BIGINT, "bigint" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "smallint" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, "varchar($l) for bit data" ); + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + registerColumnType( Types.BLOB, "blob($l)" ); + registerColumnType( Types.CLOB, "clob($l)" ); + + registerFunction("abs", new StandardSQLFunction("abs") ); + registerFunction("absval", new StandardSQLFunction("absval") ); + registerFunction("sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + + registerFunction("ceiling", new StandardSQLFunction("ceiling") ); + registerFunction("ceil", new StandardSQLFunction("ceil") ); + registerFunction("floor", new StandardSQLFunction("floor") ); + registerFunction("round", new StandardSQLFunction("round") ); + + registerFunction("acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) ); + registerFunction("asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) ); + registerFunction("atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) ); + registerFunction("cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) ); + registerFunction("cot", new StandardSQLFunction("cot", Hibernate.DOUBLE) ); + registerFunction("degrees", new StandardSQLFunction("degrees", Hibernate.DOUBLE) ); + registerFunction("exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) ); + registerFunction("float", new StandardSQLFunction("float", Hibernate.DOUBLE) ); + 
registerFunction("hex", new StandardSQLFunction("hex", Hibernate.STRING) ); + registerFunction("ln", new StandardSQLFunction("ln", Hibernate.DOUBLE) ); + registerFunction("log", new StandardSQLFunction("log", Hibernate.DOUBLE) ); + registerFunction("log10", new StandardSQLFunction("log10", Hibernate.DOUBLE) ); + registerFunction("radians", new StandardSQLFunction("radians", Hibernate.DOUBLE) ); + registerFunction("rand", new NoArgSQLFunction("rand", Hibernate.DOUBLE) ); + registerFunction("sin", new StandardSQLFunction("sin", Hibernate.DOUBLE) ); + registerFunction("soundex", new StandardSQLFunction("soundex", Hibernate.STRING) ); + registerFunction("sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction("stddev", new StandardSQLFunction("stddev", Hibernate.DOUBLE) ); + registerFunction("tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) ); + registerFunction("variance", new StandardSQLFunction("variance", Hibernate.DOUBLE) ); + + registerFunction("julian_day", new StandardSQLFunction("julian_day", Hibernate.INTEGER) ); + registerFunction("microsecond", new StandardSQLFunction("microsecond", Hibernate.INTEGER) ); + registerFunction("midnight_seconds", new StandardSQLFunction("midnight_seconds", Hibernate.INTEGER) ); + registerFunction("minute", new StandardSQLFunction("minute", Hibernate.INTEGER) ); + registerFunction("month", new StandardSQLFunction("month", Hibernate.INTEGER) ); + registerFunction("monthname", new StandardSQLFunction("monthname", Hibernate.STRING) ); + registerFunction("quarter", new StandardSQLFunction("quarter", Hibernate.INTEGER) ); + registerFunction("hour", new StandardSQLFunction("hour", Hibernate.INTEGER) ); + registerFunction("second", new StandardSQLFunction("second", Hibernate.INTEGER) ); + registerFunction("current_date", new NoArgSQLFunction("current date", Hibernate.DATE, false) ); + registerFunction("date", new StandardSQLFunction("date", Hibernate.DATE) ); + registerFunction("day", new StandardSQLFunction("day", Hibernate.INTEGER) ); + registerFunction("dayname", new StandardSQLFunction("dayname", Hibernate.STRING) ); + registerFunction("dayofweek", new StandardSQLFunction("dayofweek", Hibernate.INTEGER) ); + registerFunction("dayofweek_iso", new StandardSQLFunction("dayofweek_iso", Hibernate.INTEGER) ); + registerFunction("dayofyear", new StandardSQLFunction("dayofyear", Hibernate.INTEGER) ); + registerFunction("days", new StandardSQLFunction("days", Hibernate.LONG) ); + registerFunction("current_time", new NoArgSQLFunction("current time", Hibernate.TIME, false) ); + registerFunction("time", new StandardSQLFunction("time", Hibernate.TIME) ); + registerFunction("current_timestamp", new NoArgSQLFunction("current timestamp", Hibernate.TIMESTAMP, false) ); + registerFunction("timestamp", new StandardSQLFunction("timestamp", Hibernate.TIMESTAMP) ); + registerFunction("timestamp_iso", new StandardSQLFunction("timestamp_iso", Hibernate.TIMESTAMP) ); + registerFunction("week", new StandardSQLFunction("week", Hibernate.INTEGER) ); + registerFunction("week_iso", new StandardSQLFunction("week_iso", Hibernate.INTEGER) ); + registerFunction("year", new StandardSQLFunction("year", Hibernate.INTEGER) ); + + registerFunction("double", new StandardSQLFunction("double", Hibernate.DOUBLE) ); + registerFunction("varchar", new StandardSQLFunction("varchar", Hibernate.STRING) ); + registerFunction("real", new StandardSQLFunction("real", Hibernate.FLOAT) ); + registerFunction("bigint", new StandardSQLFunction("bigint", Hibernate.LONG) ); + 
registerFunction("char", new StandardSQLFunction("char", Hibernate.CHARACTER) ); + registerFunction("integer", new StandardSQLFunction("integer", Hibernate.INTEGER) ); + registerFunction("smallint", new StandardSQLFunction("smallint", Hibernate.SHORT) ); + + registerFunction("digits", new StandardSQLFunction("digits", Hibernate.STRING) ); + registerFunction("chr", new StandardSQLFunction("chr", Hibernate.CHARACTER) ); + registerFunction("upper", new StandardSQLFunction("upper") ); + registerFunction("lower", new StandardSQLFunction("lower") ); + registerFunction("ucase", new StandardSQLFunction("ucase") ); + registerFunction("lcase", new StandardSQLFunction("lcase") ); + registerFunction("length", new StandardSQLFunction("length", Hibernate.LONG) ); + registerFunction("ltrim", new StandardSQLFunction("ltrim") ); + registerFunction("rtrim", new StandardSQLFunction("rtrim") ); + registerFunction( "substr", new StandardSQLFunction( "substr", Hibernate.STRING ) ); + registerFunction( "posstr", new StandardSQLFunction( "posstr", Hibernate.INTEGER ) ); + + registerFunction( "substring", new StandardSQLFunction( "substr", Hibernate.STRING ) ); + registerFunction( "bit_length", new SQLFunctionTemplate( Hibernate.INTEGER, "length(?1)*8" ) ); + registerFunction( "trim", new AnsiTrimEmulationFunction() ); + + registerFunction( "concat", new VarArgsSQLFunction(Hibernate.STRING, "", "||", "") ); + + registerFunction( "str", new SQLFunctionTemplate( Hibernate.STRING, "rtrim(char(?1))" ) ); + + registerKeyword("current"); + registerKeyword("date"); + registerKeyword("time"); + registerKeyword("timestamp"); + registerKeyword("fetch"); + registerKeyword("first"); + registerKeyword("rows"); + registerKeyword("only"); + + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, NO_BATCH); + } + + public String getLowercaseFunction() { + return "lcase"; + } + + public String getAddColumnString() { + return "add column"; + } + public boolean dropConstraints() { + return false; + } + public boolean supportsIdentityColumns() { + return true; + } + public String getIdentitySelectString() { + return "values identity_val_local()"; + } + public String getIdentityColumnString() { + return "generated by default as identity"; //not null ... 
(start with 1) is implicit + } + public String getIdentityInsertString() { + return "default"; + } + + public String getSequenceNextValString(String sequenceName) { + return "values nextval for " + sequenceName; + } + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; + } + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName + " restrict"; + } + + public boolean supportsSequences() { + return true; + } + + public String getQuerySequencesString() { + return "select seqname from sysibm.syssequences"; + } + + public boolean supportsLimit() { + return true; + } + + /*public String getLimitString(String sql, boolean hasOffset) { + StringBuffer rownumber = new StringBuffer(50) + .append(" rownumber() over("); + int orderByIndex = sql.toLowerCase().indexOf("order by"); + if (orderByIndex>0) rownumber.append( sql.substring(orderByIndex) ); + rownumber.append(") as row_,"); + StringBuffer pagingSelect = new StringBuffer( sql.length()+100 ) + .append("select * from ( ") + .append(sql) + .insert( getAfterSelectInsertPoint(sql)+16, rownumber.toString() ) + .append(" ) as temp_ where row_ "); + if (hasOffset) { + pagingSelect.append("between ?+1 and ?"); + } + else { + pagingSelect.append("<= ?"); + } + return pagingSelect.toString(); + }*/ + + /** + * Render the rownumber() over ( .... ) as rownumber_, + * bit, that goes in the select list + */ + private String getRowNumber(String sql) { + StringBuffer rownumber = new StringBuffer(50) + .append("rownumber() over("); + + int orderByIndex = sql.toLowerCase().indexOf("order by"); + + if ( orderByIndex>0 && !hasDistinct(sql) ) { + rownumber.append( sql.substring(orderByIndex) ); + } + + rownumber.append(") as rownumber_,"); + + return rownumber.toString(); + } + + public String getLimitString(String sql, boolean hasOffset) { + + int startOfSelect = sql.toLowerCase().indexOf("select"); + + StringBuffer pagingSelect = new StringBuffer( sql.length()+100 ) + .append( sql.substring(0, startOfSelect) ) //add the comment + .append("select * from ( select ") //nest the main query in an outer select + .append( getRowNumber(sql) ); //add the rownnumber bit into the outer query select list + + if ( hasDistinct(sql) ) { + pagingSelect.append(" row_.* from ( ") //add another (inner) nested select + .append( sql.substring(startOfSelect) ) //add the main query + .append(" ) as row_"); //close off the inner nested select + } + else { + pagingSelect.append( sql.substring( startOfSelect + 6 ) ); //add the main query + } + + pagingSelect.append(" ) as temp_ where rownumber_ "); + + //add the restriction to the outer select + if (hasOffset) { + pagingSelect.append("between ?+1 and ?"); + } + else { + pagingSelect.append("<= ?"); + } + + return pagingSelect.toString(); + } + + private static boolean hasDistinct(String sql) { + return sql.toLowerCase().indexOf("select distinct")>=0; + } + + public String getForUpdateString() { + return " for read only with rs"; + } + + public boolean useMaxForLimit() { + return true; + } + + public boolean supportsOuterJoinForUpdate() { + return false; + } + + public boolean supportsNotNullUnique() { + return false; + } + + public String getSelectClauseNullString(int sqlType) { + String literal; + switch(sqlType) { + case Types.VARCHAR: + case Types.CHAR: + literal = "'x'"; + break; + case Types.DATE: + literal = "'2000-1-1'"; + break; + case Types.TIMESTAMP: + literal = "'2000-1-1 00:00:00'"; + break; + case Types.TIME: + literal = 
"'00:00:00'"; + break; + default: + literal = "0"; + } + return "nullif(" + literal + ',' + literal + ')'; + } + + public static void main(String[] args) { + System.out.println( new DB2Dialect().getLimitString("/*foo*/ select * from foos", true) ); + System.out.println( new DB2Dialect().getLimitString("/*foo*/ select distinct * from foos", true) ); + System.out.println( new DB2Dialect().getLimitString("/*foo*/ select * from foos foo order by foo.bar, foo.baz", true) ); + System.out.println( new DB2Dialect().getLimitString("/*foo*/ select distinct * from foos foo order by foo.bar, foo.baz", true) ); + } + + public boolean supportsUnionAll() { + return true; + } + + public int registerResultSetOutParameter(CallableStatement statement, int col) throws SQLException { + return col; + } + + public ResultSet getResultSet(CallableStatement ps) throws SQLException { + boolean isResultSet = ps.execute(); + // This assumes you will want to ignore any update counts + while (!isResultSet && ps.getUpdateCount() != -1) { + isResultSet = ps.getMoreResults(); + } + ResultSet rs = ps.getResultSet(); + // You may still have other ResultSets or update counts left to process here + // but you can't do it now or the ResultSet you just got will be closed + return rs; + } + + public boolean supportsCommentOn() { + return true; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String getCreateTemporaryTableString() { + return "declare global temporary table"; + } + + public String getCreateTemporaryTablePostfix() { + return "not logged"; + } + + public String generateTemporaryTableName(String baseTableName) { + return "session." + super.generateTemporaryTableName(baseTableName); + } + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public String getCurrentTimestampSelectString() { + return "values current timestamp"; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + public boolean supportsParametersInInsertSelect() { + // DB2 known to not support parameters within the select + // clause of an SQL INSERT ... SELECT ... 
statement + return false; + } + + public String getCurrentTimestampSQLFunctionName() { + return "sysdate"; + } + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } +} diff --git a/src/org/hibernate/dialect/DataDirectOracle9Dialect.java b/src/org/hibernate/dialect/DataDirectOracle9Dialect.java new file mode 100644 index 0000000000..92bfb4e12c --- /dev/null +++ b/src/org/hibernate/dialect/DataDirectOracle9Dialect.java @@ -0,0 +1,25 @@ +package org.hibernate.dialect; + +import java.sql.CallableStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +public class DataDirectOracle9Dialect extends Oracle9Dialect { + + public int registerResultSetOutParameter(CallableStatement statement, int col) throws SQLException { + return col; // sql server just returns automatically + } + + public ResultSet getResultSet(CallableStatement ps) throws SQLException { + boolean isResultSet = ps.execute(); +// This assumes you will want to ignore any update counts + while (!isResultSet && ps.getUpdateCount() != -1) { + isResultSet = ps.getMoreResults(); + } + ResultSet rs = ps.getResultSet(); +// You may still have other ResultSets or update counts left to process here +// but you can't do it now or the ResultSet you just got will be closed + return rs; + } + +} diff --git a/src/org/hibernate/dialect/DerbyDialect.java b/src/org/hibernate/dialect/DerbyDialect.java new file mode 100755 index 0000000000..1c424721b2 --- /dev/null +++ b/src/org/hibernate/dialect/DerbyDialect.java @@ -0,0 +1,198 @@ +//$Id$ +package org.hibernate.dialect; + +import org.hibernate.Hibernate; +import org.hibernate.QueryException; +import org.hibernate.HibernateException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.id.TableHiLoGenerator; +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.DerbyCaseFragment; + +import java.util.List; +import java.util.ArrayList; + +/** + * @author Simon Johnston + * + * Hibernate Dialect for Cloudscape 10 - aka Derby. This implements both an + * override for the identity column generator as well as for the case statement + * issue documented at: + * http://www.jroller.com/comments/kenlars99/Weblog/cloudscape_soon_to_be_derby + */ +public class DerbyDialect extends DB2Dialect { + + public DerbyDialect() { + super(); + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(","||",")" ) ); + registerFunction( "trim", new DerbyTrimFunctionEmulation() ); + } + + /** + * This is different in Cloudscape to DB2. + */ + public String getIdentityColumnString() { + return "not null generated always as identity"; //$NON-NLS-1 + } + + /** + * Return the case statement modified for Cloudscape. 
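+	 * (a DerbyCaseFragment, working around the CASE issue documented at the link in the class javadoc)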
+ */ + public CaseFragment createCaseFragment() { + return new DerbyCaseFragment(); + } + + public boolean dropConstraints() { + return true; + } + + public Class getNativeIdentifierGeneratorClass() { + return TableHiLoGenerator.class; + } + + public boolean supportsSequences() { + return false; + } + + public boolean supportsLimit() { + return false; + } + + public boolean supportsLimitOffset() { + return false; + } + + public String getQuerySequencesString() { + return null ; + } + + /** + * A specialized function template to emulate the ANSI trim function on Derby DB + * since it does not support the full trim specification. However, we cannot even + * fully emulate it because there is not standard 'replace' function either. :( + */ + public static class DerbyTrimFunctionEmulation implements SQLFunction { + private static final SQLFunction LEADING_SPACE_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "ltrim( ?1 )"); + private static final SQLFunction TRAILING_SPACE_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "rtrim( ?1 )"); + private static final SQLFunction BOTH_SPACE_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "ltrim( rtrim( ?1 ) )"); + private static final SQLFunction BOTH_SPACE_TRIM_FROM = new SQLFunctionTemplate( Hibernate.STRING, "ltrim( rtrim( ?2 ) )"); + + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + return Hibernate.STRING; + } + + public boolean hasArguments() { + return true; + } + + public boolean hasParenthesesIfNoArguments() { + return false; + } + + public String render(List args, SessionFactoryImplementor factory) throws QueryException { + // according to both the ANSI-SQL and EJB3 specs, trim can either take + // exactly one parameter or a variable number of parameters between 1 and 4. + // from the SQL spec: + // + // ::= + // TRIM + // + // ::= + // [ [ ] [ ] FROM ] + // + // ::= + // LEADING + // | TRAILING + // | BOTH + // + // If only is omitted, BOTH is assumed; + // if is omitted, space is assumed + if ( args.size() == 1 ) { + // we have the form: trim(trimSource) + // so we trim leading and trailing spaces + return BOTH_SPACE_TRIM.render( args, factory ); + } + else if ( "from".equalsIgnoreCase( ( String ) args.get( 0 ) ) ) { + // we have the form: trim(from trimSource). + // This is functionally equivalent to trim(trimSource) + return BOTH_SPACE_TRIM_FROM.render( args, factory ); + } + else { + // otherwise, a trim-specification and/or a trim-character + // have been specified; we need to decide which options + // are present and "do the right thing" + boolean leading = true; // should leading trim-characters be trimmed? + boolean trailing = true; // should trailing trim-characters be trimmed? + String trimCharacter; // the trim-character + String trimSource; // the trim-source + + // potentialTrimCharacterArgIndex = 1 assumes that a + // trim-specification has been specified. 
we handle the + // exception to that explicitly + int potentialTrimCharacterArgIndex = 1; + String firstArg = ( String ) args.get( 0 ); + if ( "leading".equalsIgnoreCase( firstArg ) ) { + trailing = false; + } + else if ( "trailing".equalsIgnoreCase( firstArg ) ) { + leading = false; + } + else if ( "both".equalsIgnoreCase( firstArg ) ) { + } + else { + potentialTrimCharacterArgIndex = 0; + } + + String potentialTrimCharacter = ( String ) args.get( potentialTrimCharacterArgIndex ); + if ( "from".equalsIgnoreCase( potentialTrimCharacter ) ) { + trimCharacter = "' '"; + trimSource = ( String ) args.get( potentialTrimCharacterArgIndex + 1 ); + } + else if ( potentialTrimCharacterArgIndex + 1 >= args.size() ) { + trimCharacter = "' '"; + trimSource = potentialTrimCharacter; + } + else { + trimCharacter = potentialTrimCharacter; + if ( "from".equalsIgnoreCase( ( String ) args.get( potentialTrimCharacterArgIndex + 1 ) ) ) { + trimSource = ( String ) args.get( potentialTrimCharacterArgIndex + 2 ); + } + else { + trimSource = ( String ) args.get( potentialTrimCharacterArgIndex + 1 ); + } + } + + List argsToUse = new ArrayList(); + argsToUse.add( trimSource ); + argsToUse.add( trimCharacter ); + + if ( trimCharacter.equals( "' '" ) ) { + if ( leading && trailing ) { + return BOTH_SPACE_TRIM.render( argsToUse, factory ); + } + else if ( leading ) { + return LEADING_SPACE_TRIM.render( argsToUse, factory ); + } + else { + return TRAILING_SPACE_TRIM.render( argsToUse, factory ); + } + } + else { + throw new HibernateException( "cannot specify trim character when using Derby, as Derby does not support the ANSI trim function, nor does it support a replace function to properly emulate it" ); + } + } + } + } + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsLobValueChangePropogation() { + return false; + } +} diff --git a/src/org/hibernate/dialect/Dialect.java b/src/org/hibernate/dialect/Dialect.java new file mode 100644 index 0000000000..edd80e5ddf --- /dev/null +++ b/src/org/hibernate/dialect/Dialect.java @@ -0,0 +1,1765 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.CallableStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Properties; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.CastFunction; +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.lock.LockingStrategy; +import org.hibernate.dialect.lock.SelectLockingStrategy; +import org.hibernate.engine.Mapping; +import org.hibernate.exception.SQLExceptionConverter; +import org.hibernate.exception.SQLStateConverter; +import org.hibernate.exception.ViolatedConstraintNameExtracter; +import org.hibernate.id.IdentityGenerator; +import org.hibernate.id.SequenceGenerator; +import org.hibernate.id.TableHiLoGenerator; +import org.hibernate.mapping.Column; +import org.hibernate.persister.entity.Lockable; +import org.hibernate.sql.ANSICaseFragment; +import 
org.hibernate.sql.ANSIJoinFragment; +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.JoinFragment; +import org.hibernate.sql.ForUpdateFragment; +import org.hibernate.type.Type; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +/** + * Represents a dialect of SQL implemented by a particular RDBMS. + * Subclasses implement Hibernate compatibility with different systems.
    + *
+ * Subclasses should provide a public default constructor that registers + * a set of type mappings and default Hibernate properties.
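+ * (for instance, the DB2 dialect above does exactly this through its registerColumnType() and registerFunction() calls)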
    + *
    + * Subclasses should be immutable. + * + * @author Gavin King, David Channon + */ +public abstract class Dialect { + + private static final Log log = LogFactory.getLog( Dialect.class ); + + public static final String DEFAULT_BATCH_SIZE = "15"; + public static final String NO_BATCH = "0"; + + /** + * Characters used for quoting SQL identifiers + */ + public static final String QUOTE = "`\"["; + public static final String CLOSED_QUOTE = "`\"]"; + + + // build the map of standard ANSI SQL aggregation functions ~~~~~~~~~~~~~~~ + + private static final Map STANDARD_AGGREGATE_FUNCTIONS = new HashMap(); + static { + STANDARD_AGGREGATE_FUNCTIONS.put( "count", new StandardSQLFunction("count") { + public Type getReturnType(Type columnType, Mapping mapping) { + return Hibernate.LONG; + } + } ); + + STANDARD_AGGREGATE_FUNCTIONS.put( "avg", new StandardSQLFunction("avg") { + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + int[] sqlTypes; + try { + sqlTypes = columnType.sqlTypes( mapping ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + if ( sqlTypes.length != 1 ) throw new QueryException( "multi-column type in avg()" ); + return Hibernate.DOUBLE; + } + } ); + + STANDARD_AGGREGATE_FUNCTIONS.put( "max", new StandardSQLFunction("max") ); + STANDARD_AGGREGATE_FUNCTIONS.put( "min", new StandardSQLFunction("min") ); + STANDARD_AGGREGATE_FUNCTIONS.put( "sum", new StandardSQLFunction("sum") { + public Type getReturnType(Type columnType, Mapping mapping) { + //pre H3.2 behavior: super.getReturnType(ct, m); + int[] sqlTypes; + try { + sqlTypes = columnType.sqlTypes( mapping ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + if ( sqlTypes.length != 1 ) throw new QueryException( "multi-column type in sum()" ); + int sqlType = sqlTypes[0]; + + // First allow the actual type to control the return value. (the actual underlying sqltype could actually be different) + if ( columnType == Hibernate.BIG_INTEGER ) { + return Hibernate.BIG_INTEGER; + } + else if ( columnType == Hibernate.BIG_DECIMAL ) { + return Hibernate.BIG_DECIMAL; + } + else if ( columnType == Hibernate.LONG || columnType == Hibernate.SHORT || columnType == Hibernate.INTEGER) { + return Hibernate.LONG; + } + else if ( columnType == Hibernate.FLOAT || columnType == Hibernate.DOUBLE) { + return Hibernate.DOUBLE; + } + + // finally use the sqltype if == on Hibernate types did not find a match. 
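+				// e.g. a column of Types.NUMERIC keeps the mapped type itself (numeric can hold anything), + // while an integral code such as Types.SMALLINT is widened to Hibernate.LONG below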
+ if ( sqlType == Types.NUMERIC ) { + return columnType; //because numeric can be anything + } + else if ( sqlType == Types.FLOAT || sqlType == Types.DOUBLE || sqlType == Types.DECIMAL || sqlType == Types.REAL) { + return Hibernate.DOUBLE; + } + else if ( sqlType == Types.BIGINT || sqlType == Types.INTEGER || sqlType == Types.SMALLINT || sqlType == Types.TINYINT ) { + return Hibernate.LONG; + } + else { + return columnType; + } + } + }); + } + + private final TypeNames typeNames = new TypeNames(); + private final TypeNames hibernateTypeNames = new TypeNames(); + + private final Properties properties = new Properties(); + private final Map sqlFunctions = new HashMap(); + private final Set sqlKeywords = new HashSet(); + + + // constructors and factory methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected Dialect() { + log.info( "Using dialect: " + this ); + sqlFunctions.putAll( STANDARD_AGGREGATE_FUNCTIONS ); + + // standard sql92 functions (can be overridden by subclasses) + registerFunction( "substring", new SQLFunctionTemplate( Hibernate.STRING, "substring(?1, ?2, ?3)" ) ); + registerFunction( "locate", new SQLFunctionTemplate( Hibernate.INTEGER, "locate(?1, ?2, ?3)" ) ); + registerFunction( "trim", new SQLFunctionTemplate( Hibernate.STRING, "trim(?1 ?2 ?3 ?4)" ) ); + registerFunction( "length", new StandardSQLFunction( "length", Hibernate.INTEGER ) ); + registerFunction( "bit_length", new StandardSQLFunction( "bit_length", Hibernate.INTEGER ) ); + registerFunction( "coalesce", new StandardSQLFunction( "coalesce" ) ); + registerFunction( "nullif", new StandardSQLFunction( "nullif" ) ); + registerFunction( "abs", new StandardSQLFunction( "abs" ) ); + registerFunction( "mod", new StandardSQLFunction( "mod", Hibernate.INTEGER) ); + registerFunction( "sqrt", new StandardSQLFunction( "sqrt", Hibernate.DOUBLE) ); + registerFunction( "upper", new StandardSQLFunction("upper") ); + registerFunction( "lower", new StandardSQLFunction("lower") ); + registerFunction( "cast", new CastFunction() ); + registerFunction( "extract", new SQLFunctionTemplate(Hibernate.INTEGER, "extract(?1 ?2 ?3)") ); + + //map second/minute/hour/day/month/year to ANSI extract(), override on subclasses + registerFunction( "second", new SQLFunctionTemplate(Hibernate.INTEGER, "extract(second from ?1)") ); + registerFunction( "minute", new SQLFunctionTemplate(Hibernate.INTEGER, "extract(minute from ?1)") ); + registerFunction( "hour", new SQLFunctionTemplate(Hibernate.INTEGER, "extract(hour from ?1)") ); + registerFunction( "day", new SQLFunctionTemplate(Hibernate.INTEGER, "extract(day from ?1)") ); + registerFunction( "month", new SQLFunctionTemplate(Hibernate.INTEGER, "extract(month from ?1)") ); + registerFunction( "year", new SQLFunctionTemplate(Hibernate.INTEGER, "extract(year from ?1)") ); + + registerFunction( "str", new SQLFunctionTemplate(Hibernate.STRING, "cast(?1 as char)") ); + + // register hibernate types for default use in scalar sqlquery type auto detection + registerHibernateType( Types.BIGINT, Hibernate.BIG_INTEGER.getName() ); + registerHibernateType( Types.BINARY, Hibernate.BINARY.getName() ); + registerHibernateType( Types.BIT, Hibernate.BOOLEAN.getName() ); + registerHibernateType( Types.CHAR, Hibernate.CHARACTER.getName() ); + registerHibernateType( Types.DATE, Hibernate.DATE.getName() ); + registerHibernateType( Types.DOUBLE, Hibernate.DOUBLE.getName() ); + registerHibernateType( Types.FLOAT, Hibernate.FLOAT.getName() ); + registerHibernateType( Types.INTEGER, Hibernate.INTEGER.getName() ); + 
registerHibernateType( Types.SMALLINT, Hibernate.SHORT.getName() ); + registerHibernateType( Types.TINYINT, Hibernate.BYTE.getName() ); + registerHibernateType( Types.TIME, Hibernate.TIME.getName() ); + registerHibernateType( Types.TIMESTAMP, Hibernate.TIMESTAMP.getName() ); + registerHibernateType( Types.VARCHAR, Hibernate.STRING.getName() ); + registerHibernateType( Types.VARBINARY, Hibernate.BINARY.getName() ); + registerHibernateType( Types.NUMERIC, Hibernate.BIG_DECIMAL.getName() ); + registerHibernateType( Types.DECIMAL, Hibernate.BIG_DECIMAL.getName() ); + registerHibernateType( Types.BLOB, Hibernate.BLOB.getName() ); + registerHibernateType( Types.CLOB, Hibernate.CLOB.getName() ); + registerHibernateType( Types.REAL, Hibernate.FLOAT.getName() ); + } + + /** + * Get an instance of the dialect specified by the current System properties. + * + * @return The specified Dialect + * @throws HibernateException If no dialect was specified, or if it could not be instantiated. + */ + public static Dialect getDialect() throws HibernateException { + String dialectName = Environment.getProperties().getProperty( Environment.DIALECT ); + return instantiateDialect( dialectName ); + } + + + /** + * Get an instance of the dialect specified by the given properties or by + * the current System properties. + * + * @param props The properties to use for finding the dialect class to use. + * @return The specified Dialect + * @throws HibernateException If no dialect was specified, or if it could not be instantiated. + */ + public static Dialect getDialect(Properties props) throws HibernateException { + String dialectName = props.getProperty( Environment.DIALECT ); + if ( dialectName == null ) { + return getDialect(); + } + return instantiateDialect( dialectName ); + } + + private static Dialect instantiateDialect(String dialectName) throws HibernateException { + if ( dialectName == null ) { + throw new HibernateException( "The dialect was not set. Set the property hibernate.dialect." ); + } + try { + return ( Dialect ) ReflectHelper.classForName( dialectName ).newInstance(); + } + catch ( ClassNotFoundException cnfe ) { + throw new HibernateException( "Dialect class not found: " + dialectName ); + } + catch ( Exception e ) { + throw new HibernateException( "Could not instantiate dialect class", e ); + } + } + + /** + * Retrieve a set of default Hibernate properties for this database. + * + * @return a set of Hibernate properties + */ + public final Properties getDefaultProperties() { + return properties; + } + + public String toString() { + return getClass().getName(); + } + + + // database type mapping support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Get the name of the database type associated with the given + * {@link java.sql.Types} typecode. + * + * @param code The {@link java.sql.Types} typecode + * @return the database type name + * @throws HibernateException If no mapping was specified for that type. + */ + public String getTypeName(int code) throws HibernateException { + String result = typeNames.get( code ); + if ( result == null ) { + throw new HibernateException( "No default type mapping for (java.sql.Types) " + code ); + } + return result; + } + + /** + * Get the name of the database type associated with the given + * {@link java.sql.Types} typecode with the given storage specification + * parameters. 
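+	 * For example, given the <tt>varchar($l)</tt> registration made by the DB2 dialect above, a <tt>Types.VARCHAR</tt> code with length 100 resolves to <tt>varchar(100)</tt>.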
+ * + * @param code The {@link java.sql.Types} typecode + * @param length The datatype length + * @param precision The datatype precision + * @param scale The datatype scale + * @return the database type name + * @throws HibernateException If no mapping was specified for that type. + */ + public String getTypeName(int code, int length, int precision, int scale) throws HibernateException { + String result = typeNames.get( code, length, precision, scale ); + if ( result == null ) { + throw new HibernateException( + "No type mapping for java.sql.Types code: " + + code + + ", length: " + + length + ); + } + return result; + } + + /** + * Get the name of the database type appropriate for casting operations + * (via the CAST() SQL function) for the given {@link java.sql.Types} typecode. + * + * @param code The {@link java.sql.Types} typecode + * @return The database type name + */ + public String getCastTypeName(int code) { + return getTypeName( code, Column.DEFAULT_LENGTH, Column.DEFAULT_PRECISION, Column.DEFAULT_SCALE ); + } + + /** + * Subclasses register a type name for the given type code and maximum + * column length. $l in the type name will be replaced by the + * column length (if appropriate). + * + * @param code The {@link java.sql.Types} typecode + * @param capacity The maximum length of database type + * @param name The database type name + */ + protected void registerColumnType(int code, int capacity, String name) { + typeNames.put( code, capacity, name ); + } + + /** + * Subclasses register a type name for the given type code. $l in + * the type name will be replaced by the column length (if appropriate). + * + * @param code The {@link java.sql.Types} typecode + * @param name The database type name + */ + protected void registerColumnType(int code, String name) { + typeNames.put( code, name ); + } + + + // hibernate type mapping support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Get the name of the Hibernate {@link org.hibernate.type.Type} associated with the given + * {@link java.sql.Types} typecode. + * + * @param code The {@link java.sql.Types} typecode + * @return The Hibernate {@link org.hibernate.type.Type} name. + * @throws HibernateException If no mapping was specified for that type. + */ + public String getHibernateTypeName(int code) throws HibernateException { + String result = hibernateTypeNames.get( code ); + if ( result == null ) { + throw new HibernateException( "No Hibernate type mapping for java.sql.Types code: " + code ); + } + return result; + } + + /** + * Get the name of the Hibernate {@link org.hibernate.type.Type} associated + * with the given {@link java.sql.Types} typecode with the given storage + * specification parameters. + * + * @param code The {@link java.sql.Types} typecode + * @param length The datatype length + * @param precision The datatype precision + * @param scale The datatype scale + * @return The Hibernate {@link org.hibernate.type.Type} name. + * @throws HibernateException If no mapping was specified for that type. + */ + public String getHibernateTypeName(int code, int length, int precision, int scale) throws HibernateException { + String result = hibernateTypeNames.get( code, length, precision, scale ); + if ( result == null ) { + throw new HibernateException( + "No Hibernate type mapping for java.sql.Types code: " + + code + + ", length: " + + length + ); + } + return result; + } + + /** + * Registers a Hibernate {@link org.hibernate.type.Type} name for the given + * {@link java.sql.Types} type code and maximum column length.
+ * + * @param code The {@link java.sql.Types} typecode + * @param capacity The maximum length of database type + * @param name The Hibernate {@link org.hibernate.type.Type} name + */ + protected void registerHibernateType(int code, int capacity, String name) { + hibernateTypeNames.put( code, capacity, name); + } + + /** + * Registers a Hibernate {@link org.hibernate.type.Type} name for the given + * {@link java.sql.Types} type code. + * + * @param code The {@link java.sql.Types} typecode + * @param name The Hibernate {@link org.hibernate.type.Type} name + */ + protected void registerHibernateType(int code, String name) { + hibernateTypeNames.put( code, name); + } + + + // function support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected void registerFunction(String name, SQLFunction function) { + sqlFunctions.put( name, function ); + } + + /** + * Retrieves a map of the dialect's registered functions + * (functionName => {@link org.hibernate.dialect.function.SQLFunction}). + * + * @return The map of registered functions. + */ + public final Map getFunctions() { + return sqlFunctions; + } + + + // keyword support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected void registerKeyword(String word) { + sqlKeywords.add(word); + } + + public Set getKeywords() { + return sqlKeywords; + } + + + // native identifier generation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * The class (which implements {@link org.hibernate.id.IdentifierGenerator}) + * which acts as this dialect's native generation strategy. + *
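+	 * (the default implementation below chooses identity columns if supported, then sequences, then the table hi/lo generator)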
+ * Comes into play whenever the user specifies the native generator. + * + * @return The native generator class. + */ + public Class getNativeIdentifierGeneratorClass() { + if ( supportsIdentityColumns() ) { + return IdentityGenerator.class; + } + else if ( supportsSequences() ) { + return SequenceGenerator.class; + } + else { + return TableHiLoGenerator.class; + } + } + + + // IDENTITY support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Does this dialect support identity column key generation? + * + * @return True if IDENTITY columns are supported; false otherwise. + */ + public boolean supportsIdentityColumns() { + return false; + } + + /** + * Does the dialect support some form of inserting and selecting + * the generated IDENTITY value all in the same statement. + * + * @return True if the dialect supports selecting the just + * generated IDENTITY in the insert statement. + */ + public boolean supportsInsertSelectIdentity() { + return false; + } + + /** + * Whether this dialect has an Identity clause added to the data type or a + * completely separate identity data type + * + * @return boolean + */ + public boolean hasDataTypeInIdentityColumn() { + return true; + } + + /** + * Provided we {@link #supportsInsertSelectIdentity}, then attach the + * "select identity" clause to the insert statement. + *
+ * Note, if {@link #supportsInsertSelectIdentity} == false then + * the insert-string should be returned without modification. + * + * @param insertString The insert command + * @return The insert command with any necessary identity select + * clause attached. + */ + public String appendIdentitySelectToInsert(String insertString) { + return insertString; + } + + /** + * Get the select command to use to retrieve the last generated IDENTITY + * value for a particular table + * + * @param table The table into which the insert was done + * @param column The PK column. + * @param type The {@link java.sql.Types} type code. + * @return The appropriate select command + * @throws MappingException If IDENTITY generation is not supported. + */ + public String getIdentitySelectString(String table, String column, int type) throws MappingException { + return getIdentitySelectString(); + } + + /** + * Get the select command to use to retrieve the last generated IDENTITY + * value. + * + * @return The appropriate select command + * @throws MappingException If IDENTITY generation is not supported. + */ + protected String getIdentitySelectString() throws MappingException { + throw new MappingException( "Dialect does not support identity key generation" ); + } + + /** + * The syntax used during DDL to define a column as being an IDENTITY of + * a particular type. + * + * @param type The {@link java.sql.Types} type code. + * @return The appropriate DDL fragment. + * @throws MappingException If IDENTITY generation is not supported. + */ + public String getIdentityColumnString(int type) throws MappingException { + return getIdentityColumnString(); + } + + /** + * The syntax used during DDL to define a column as being an IDENTITY. + * + * @return The appropriate DDL fragment. + * @throws MappingException If IDENTITY generation is not supported. + */ + protected String getIdentityColumnString() throws MappingException { + throw new MappingException( "Dialect does not support identity key generation" ); + } + + /** + * The keyword used to insert a generated value into an identity column (or null). + * Needed if the dialect does not support inserts that specify no column values. + * + * @return The appropriate keyword. + */ + public String getIdentityInsertString() { + return null; + } + + + // SEQUENCE support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Does this dialect support sequences? + * + * @return True if sequences supported; false otherwise. + */ + public boolean supportsSequences() { + return false; + } + + /** + * Does this dialect support "pooled" sequences? Essentially, can we + * specify the initial and increment values? + * + * @return True if such "pooled" sequences are supported; false otherwise. + * @see #getCreateSequenceStrings(String, int, int) + * @see #getCreateSequenceString(String, int, int) + */ + public boolean supportsPooledSequences() { + return false; + } + + /** + * Generate the appropriate select statement to retrieve the next value + * of a sequence. + * <p/>
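+ * For example, an Oracle-style dialect might plausibly implement this as
+ * (a sketch, not taken from this patch):
+ * <pre>
+ * public String getSequenceNextValString(String sequenceName) {
+ *     return "select " + sequenceName + ".nextval from dual";
+ * }
+ * </pre>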
+ * This should be a "stand alone" select statement. + * + * @param sequenceName the name of the sequence + * @return The "nextval" select string. + * @throws MappingException If sequences are not supported. + */ + public String getSequenceNextValString(String sequenceName) throws MappingException { + throw new MappingException( "Dialect does not support sequences" ); + } + + /** + * Generate the select expression fragment that will retrieve the next + * value of a sequence as part of another (typically DML) statement. + * <p/>
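+ * Continuing the Oracle-style sketch above, this method would return only
+ * the expression fragment (again illustrative, not taken from this patch):
+ * <pre>
+ * public String getSelectSequenceNextValString(String sequenceName) {
+ *     return sequenceName + ".nextval";
+ * }
+ * </pre>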
+ * This differs from {@link #getSequenceNextValString(String)} in that this + * should return an expression usable within another statement. + * + * @param sequenceName the name of the sequence + * @return The "nextval" fragment. + * @throws MappingException If sequences are not supported. + */ + public String getSelectSequenceNextValString(String sequenceName) throws MappingException { + throw new MappingException( "Dialect does not support sequences" ); + } + + /** + * The multiline script used to create a sequence. + * + * @param sequenceName The name of the sequence + * @return The sequence creation commands + * @throws MappingException If sequences are not supported. + * @deprecated Use {@link #getCreateSequenceString(String, int, int)} instead + */ + public String[] getCreateSequenceStrings(String sequenceName) throws MappingException { + return new String[] { getCreateSequenceString( sequenceName ) }; + } + + /** + * An optional multi-line form for databases which {@link #supportsPooledSequences()}. + * + * @param sequenceName The name of the sequence + * @param initialValue The initial value to apply to the 'create sequence' statement + * @param incrementSize The increment value to apply to the 'create sequence' statement + * @return The sequence creation commands + * @throws MappingException If sequences are not supported. + */ + public String[] getCreateSequenceStrings(String sequenceName, int initialValue, int incrementSize) throws MappingException { + return new String[] { getCreateSequenceString( sequenceName, initialValue, incrementSize ) }; + } + + /** + * Typically dialects which support sequences can create a sequence + * with a single command. This is a convenience form of + * {@link #getCreateSequenceStrings} to help facilitate that. + * <p/>
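+ * A typical single-command override might look like (a sketch, not taken
+ * from this patch):
+ * <pre>
+ * protected String getCreateSequenceString(String sequenceName) {
+ *     return "create sequence " + sequenceName;
+ * }
+ * </pre>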
    + * Dialects which support sequences and can create a sequence in a + * single command need *only* override this method. Dialects + * which support sequences but require multiple commands to create + * a sequence should instead override {@link #getCreateSequenceStrings}. + * + * @param sequenceName The name of the sequence + * @return The sequence creation command + * @throws MappingException If sequences are not supported. + */ + protected String getCreateSequenceString(String sequenceName) throws MappingException { + throw new MappingException( "Dialect does not support sequences" ); + } + + /** + * Overloaded form of {@link #getCreateSequenceString(String)}, additionally + * taking the initial value and increment size to be applied to the sequence + * definition. + *
<p/>
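+ * For example, given a base command of "create sequence my_seq" (a
+ * hypothetical sequence name), an initial value of 1 and an increment size
+ * of 20, the default implementation described below would produce:
+ * <pre>
+ * create sequence my_seq start with 1 increment by 20
+ * </pre>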
+ * The default definition is to suffix {@link #getCreateSequenceString(String)} + * with the string: " start with {initialValue} increment by {incrementSize}" where + * {initialValue} and {incrementSize} are replacement placeholders. Generally + * dialects should only need to override this method if different key phrases + * are used to apply the allocation information. + * + * @param sequenceName The name of the sequence + * @param initialValue The initial value to apply to the 'create sequence' statement + * @param incrementSize The increment value to apply to the 'create sequence' statement + * @return The sequence creation command + * @throws MappingException If sequences are not supported. + */ + protected String getCreateSequenceString(String sequenceName, int initialValue, int incrementSize) throws MappingException { + if ( supportsPooledSequences() ) { + return getCreateSequenceString( sequenceName ) + " start with " + initialValue + " increment by " + incrementSize; + } + throw new MappingException( "Dialect does not support pooled sequences" ); + } + + /** + * The multiline script used to drop a sequence. + * + * @param sequenceName The name of the sequence + * @return The sequence drop commands + * @throws MappingException If sequences are not supported. + */ + public String[] getDropSequenceStrings(String sequenceName) throws MappingException { + return new String[]{getDropSequenceString( sequenceName )}; + } + + /** + * Typically dialects which support sequences can drop a sequence + * with a single command. This is a convenience form of + * {@link #getDropSequenceStrings} to help facilitate that. + * <p/>
+ * Dialects which support sequences and can drop a sequence in a + * single command need *only* override this method. Dialects + * which support sequences but require multiple commands to drop + * a sequence should instead override {@link #getDropSequenceStrings}. + * + * @param sequenceName The name of the sequence + * @return The sequence drop command + * @throws MappingException If sequences are not supported. + */ + protected String getDropSequenceString(String sequenceName) throws MappingException { + throw new MappingException( "Dialect does not support sequences" ); + } + + /** + * Get the select command used to retrieve the names of all sequences. + * + * @return The select command; or null if sequences are not supported. + * @see org.hibernate.tool.hbm2ddl.SchemaUpdate + */ + public String getQuerySequencesString() { + return null; + } + + + // GUID support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Get the command used to select a GUID from the underlying database. + * <p/>
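+ * For example (illustrative only), a SQL Server-style dialect might return
+ * "select newid()", while an Oracle-style dialect might return
+ * "select rawtohex(sys_guid()) from dual".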
+ * Optional operation. + * + * @return The appropriate command. + */ + public String getSelectGUIDString() { + throw new UnsupportedOperationException( "dialect does not support GUIDs" ); + } + + + // limit/offset support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Does this dialect support some form of limiting query results + * via a SQL clause? + * + * @return True if this dialect supports some form of LIMIT. + */ + public boolean supportsLimit() { + return false; + } + + /** + * Does this dialect's LIMIT support (if any) additionally + * support specifying an offset? + * + * @return True if the dialect supports an offset within the limit support. + */ + public boolean supportsLimitOffset() { + return supportsLimit(); + } + + /** + * Does this dialect support bind variables (i.e., prepared statement + * parameters) for its limit/offset? + * + * @return True if bind variables can be used; false otherwise. + */ + public boolean supportsVariableLimit() { + return supportsLimit(); + } + + /** + * The LIMIT clause is generally expressed in the form LIMIT offset, limit. + * Does this dialect require us to bind the parameters in reverse order? + * + * @return true if the correct order is limit, offset + */ + public boolean bindLimitParametersInReverseOrder() { + return false; + } + + /** + * Does the LIMIT clause come at the start of the + * SELECT statement, rather than at the end? + * + * @return true if limit parameters should come before other parameters + */ + public boolean bindLimitParametersFirst() { + return false; + } + + /** + * Does the LIMIT clause take a "maximum" row number instead + * of a total number of returned rows? + * <p/>
+ * This is easiest understood via an example. Consider you have a table + * with 20 rows, but you only want to retrieve rows number 11 through 20. + * Generally, a limit with offset would say that the offset = 11 and the + * limit = 10 (we only want 10 rows at a time); this is specifying the + * total number of returned rows. Some dialects require that we instead + * specify offset = 11 and limit = 20, where 20 is the "maximum" row we want + * relative to offset (i.e. the last row number, rather than the count of + * 10 rows to return). + * <p/>
+ * So essentially, is limit relative from offset? Or is limit absolute? + * + * @return True if limit is relative from offset; false otherwise. + */ + public boolean useMaxForLimit() { + return false; + } + + /** + * Given a limit and an offset, apply the limit clause to the query. + * + * @param query The query to which to apply the limit. + * @param offset The offset of the limit + * @param limit The limit (maximum number of rows to return) + * @return The modified query statement with the limit applied. + */ + public String getLimitString(String query, int offset, int limit) { + return getLimitString( query, offset > 0 ); + } + + /** + * Apply a limit clause to the query. + * <p/>
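+ * For example, a MySQL-style dialect might override this as (a sketch, not
+ * taken from this patch):
+ * <pre>
+ * protected String getLimitString(String query, boolean hasOffset) {
+ *     return query + ( hasOffset ? " limit ?, ?" : " limit ?" );
+ * }
+ * </pre>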
+ * Typically dialects utilize {@link #supportsVariableLimit() variable} + * limit clauses when they support limits. Thus, when building the + * select command we do not actually need to know the limit or the offset + * since we will just be using placeholders. + * <p/>
+ * Here we do still pass along whether or not an offset was specified + * so that dialects not supporting offsets can generate proper exceptions. + * In general, dialects will override one or the other of this method and + * {@link #getLimitString(String, int, int)}. + * + * @param query The query to which to apply the limit. + * @param hasOffset Is the query requesting an offset? + * @return the modified SQL + */ + protected String getLimitString(String query, boolean hasOffset) { + throw new UnsupportedOperationException( "paged queries not supported" ); + } + + + // lock acquisition support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Get a strategy instance which knows how to acquire a database-level lock + * of the specified mode for this dialect. + * + * @param lockable The persister for the entity to be locked. + * @param lockMode The type of lock to be acquired. + * @return The appropriate locking strategy. + * @since 3.2 + */ + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + return new SelectLockingStrategy( lockable, lockMode ); + } + + /** + * Given a lock mode, determine the appropriate for update fragment to use. + * + * @param lockMode The lock mode to apply. + * @return The appropriate for update fragment. + */ + public String getForUpdateString(LockMode lockMode) { + if ( lockMode==LockMode.UPGRADE ) { + return getForUpdateString(); + } + else if ( lockMode==LockMode.UPGRADE_NOWAIT ) { + return getForUpdateNowaitString(); + } + else if ( lockMode==LockMode.FORCE ) { + return getForUpdateNowaitString(); + } + else { + return ""; + } + } + + /** + * Get the string to append to SELECT statements to acquire locks + * for this dialect. + * + * @return The appropriate FOR UPDATE clause string. + */ + public String getForUpdateString() { + return " for update"; + } + + /** + * Is FOR UPDATE OF syntax supported? + * + * @return True if the database supports FOR UPDATE OF syntax; + * false otherwise. + */ + public boolean forUpdateOfColumns() { + // by default we report no support + return false; + } + + /** + * Does this dialect support FOR UPDATE in conjunction with + * outer joined rows? + * + * @return True if outer joined rows can be locked via FOR UPDATE. + */ + public boolean supportsOuterJoinForUpdate() { + return true; + } + + /** + * Get the FOR UPDATE OF column_list fragment appropriate for this + * dialect given the aliases of the columns to be write locked. + * + * @param aliases The columns to be write locked. + * @return The appropriate FOR UPDATE OF column_list clause string. + */ + public String getForUpdateString(String aliases) { + // by default we simply return the getForUpdateString() result since + // the default is to say no support for "FOR UPDATE OF ..." + return getForUpdateString(); + } + + /** + * Retrieves the FOR UPDATE NOWAIT syntax specific to this dialect. + * + * @return The appropriate FOR UPDATE NOWAIT clause string. + */ + public String getForUpdateNowaitString() { + // by default we report no support for NOWAIT lock semantics + return getForUpdateString(); + } + + /** + * Get the FOR UPDATE OF column_list NOWAIT fragment appropriate + * for this dialect given the aliases of the columns to be write locked. + * + * @param aliases The columns to be write locked. + * @return The appropriate FOR UPDATE column_list NOWAIT clause string.
+ */ + public String getForUpdateNowaitString(String aliases) { + return getForUpdateString( aliases ); + } + + /** + * Some dialects support an alternative means to SELECT FOR UPDATE, + * whereby a "lock hint" is appended to the table name in the from clause. + * <p/>
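+ * For example, a SQL Server-style dialect might implement this along the
+ * following lines (a sketch, not taken from this patch):
+ * <pre>
+ * public String appendLockHint(LockMode mode, String tableName) {
+ *     if ( mode.greaterThan( LockMode.READ ) ) {
+ *         return tableName + " with (updlock, rowlock)";
+ *     }
+ *     return tableName;
+ * }
+ * </pre>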
    + * contributed by Helge Schulz + * + * @param mode The lock mode to apply + * @param tableName The name of the table to which to apply the lock hint. + * @return The table with any required lock hints. + */ + public String appendLockHint(LockMode mode, String tableName) { + return tableName; + } + + /** + * Modifies the given SQL by applying the appropriate updates for the specified + * lock modes and key columns. + *
<p/>
    + * The behavior here is that of an ANSI SQL SELECT FOR UPDATE. This + * method is really intended to allow dialects which do not support + * SELECT FOR UPDATE to achieve this in their own fashion. + * + * @param sql the SQL string to modify + * @param aliasedLockModes a map of lock modes indexed by aliased table names. + * @param keyColumnNames a map of key columns indexed by aliased table names. + * @return the modified SQL string. + */ + public String applyLocksToSql(String sql, Map aliasedLockModes, Map keyColumnNames) { + return sql + new ForUpdateFragment( this, aliasedLockModes, keyColumnNames ).toFragmentString(); + } + + + // table support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Command used to create a table. + * + * @return The command used to create a table. + */ + public String getCreateTableString() { + return "create table"; + } + + /** + * Slight variation on {@link #getCreateTableString}. Here, we have the + * command used to create a table when there is no primary key and + * duplicate rows are expected. + *
<p/>
+ * Most databases do not care about the distinction; originally added for + * Teradata support which does care. + * + * @return The command used to create a multiset table. + */ + public String getCreateMultisetTableString() { + return getCreateTableString(); + } + + + // temporary table support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Does this dialect support temporary tables? + * + * @return True if temp tables are supported; false otherwise. + */ + public boolean supportsTemporaryTables() { + return false; + } + + /** + * Generate a temporary table name given the base table. + * + * @param baseTableName The table name from which to base the temp table name. + * @return The generated temp table name. + */ + public String generateTemporaryTableName(String baseTableName) { + return "HT_" + baseTableName; + } + + /** + * Command used to create a temporary table. + * + * @return The command used to create a temporary table. + */ + public String getCreateTemporaryTableString() { + return "create table"; + } + + /** + * Get any fragments needing to be postfixed to the command for + * temporary table creation. + * + * @return Any required postfix. + */ + public String getCreateTemporaryTablePostfix() { + return ""; + } + + /** + * Does the dialect require that temporary table DDL statements occur in + * isolation from other statements? This would be the case if the creation + * would cause any current transaction to get committed implicitly. + * <p/>
    + * JDBC defines a standard way to query for this information via the + * {@link java.sql.DatabaseMetaData#dataDefinitionCausesTransactionCommit()} + * method. However, that does not distinguish between temporary table + * DDL and other forms of DDL; MySQL, for example, reports DDL causing a + * transaction commit via its driver, even though that is not the case for + * temporary table DDL. + *
<p/>
+ * Possible return values and their meanings:<ul> + * <li>{@link Boolean#TRUE} - Unequivocally, perform the temporary table DDL in isolation.</li> + * <li>{@link Boolean#FALSE} - Unequivocally, do not perform the temporary table DDL in isolation.</li> + * <li><i>null</i> - defer to the JDBC driver response in regards to {@link java.sql.DatabaseMetaData#dataDefinitionCausesTransactionCommit()}</li> + * </ul>
+ * + * @return see the result matrix above. + */ + public Boolean performTemporaryTableDDLInIsolation() { + return null; + } + + /** + * Do we need to drop the temporary table after use? + * + * @return True if the table should be dropped. + */ + public boolean dropTemporaryTableAfterUse() { + return true; + } + + + // callable statement support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Registers an OUT parameter which will be returning a + * {@link java.sql.ResultSet}. How this is accomplished varies greatly + * from DB to DB, hence its inclusion (along with {@link #getResultSet}) here. + * + * @param statement The callable statement. + * @param position The bind position at which to register the OUT param. + * @return The number of (contiguous) bind positions used. + * @throws SQLException Indicates problems registering the OUT param. + */ + public int registerResultSetOutParameter(CallableStatement statement, int position) throws SQLException { + throw new UnsupportedOperationException( + getClass().getName() + + " does not support resultsets via stored procedures" + ); + } + + /** + * Given a callable statement previously processed by {@link #registerResultSetOutParameter}, + * extract the {@link java.sql.ResultSet} from the OUT parameter. + * + * @param statement The callable statement. + * @return The extracted result set. + * @throws SQLException Indicates problems extracting the result set. + */ + public ResultSet getResultSet(CallableStatement statement) throws SQLException { + throw new UnsupportedOperationException( + getClass().getName() + + " does not support resultsets via stored procedures" + ); + } + + // current timestamp support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Does this dialect support a way to retrieve the database's current + * timestamp value? + * + * @return True if the current timestamp can be retrieved; false otherwise. + */ + public boolean supportsCurrentTimestampSelection() { + return false; + } + + /** + * Should the value returned by {@link #getCurrentTimestampSelectString} + * be treated as callable? Typically this indicates that JDBC escape + * syntax is being used... + * + * @return True if the {@link #getCurrentTimestampSelectString} return + * is callable; false otherwise. + */ + public boolean isCurrentTimestampSelectStringCallable() { + throw new UnsupportedOperationException( "Database not known to define a current timestamp function" ); + } + + /** + * Retrieve the command used to retrieve the current timestamp from the + * database. + * + * @return The command. + */ + public String getCurrentTimestampSelectString() { + throw new UnsupportedOperationException( "Database not known to define a current timestamp function" ); + } + + /** + * The name of the database-specific SQL function for retrieving the + * current timestamp. + * + * @return The function name. + */ + public String getCurrentTimestampSQLFunctionName() { + // the standard SQL function name is current_timestamp... + return "current_timestamp"; + } + + + // SQLException support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Build an instance of the SQLExceptionConverter preferred by this dialect for + * converting SQLExceptions into Hibernate's JDBCException hierarchy. The default + * Dialect implementation simply returns a converter based on X/Open SQLState codes. + * <p/>
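+ * A vendor-aware dialect could return an error-code-based converter instead;
+ * the converter class named here is hypothetical, purely for illustration:
+ * <pre>
+ * public SQLExceptionConverter buildSQLExceptionConverter() {
+ *     return new MyErrorCodeConverter( getViolatedConstraintNameExtracter() );
+ * }
+ * </pre>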
    + * It is strongly recommended that specific Dialect implementations override this + * method, since interpretation of a SQL error is much more accurate when based on + * the ErrorCode rather than the SQLState. Unfortunately, the ErrorCode is a vendor- + * specific approach. + * + * @return The Dialect's preferred SQLExceptionConverter. + */ + public SQLExceptionConverter buildSQLExceptionConverter() { + // The default SQLExceptionConverter for all dialects is based on SQLState + // since SQLErrorCode is extremely vendor-specific. Specific Dialects + // may override to return whatever is most appropriate for that vendor. + return new SQLStateConverter( getViolatedConstraintNameExtracter() ); + } + + private static final ViolatedConstraintNameExtracter EXTRACTER = new ViolatedConstraintNameExtracter() { + public String extractConstraintName(SQLException sqle) { + return null; + } + }; + + public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { + return EXTRACTER; + } + + + // union subclass support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Given a {@link java.sql.Types} type code, determine an appropriate + * null value to use in a select clause. + *
<p/>
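+ * For example, a dialect requiring typed nulls might override this as
+ * (a sketch, not taken from this patch):
+ * <pre>
+ * public String getSelectClauseNullString(int sqlType) {
+ *     return sqlType == Types.VARCHAR ? "cast(null as varchar(1))" : "null";
+ * }
+ * </pre>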
+ * One thing to consider is that certain databases might require + * proper casting for the nulls, since this select will be part + * of a UNION/UNION ALL. + * + * @param sqlType The {@link java.sql.Types} type code. + * @return The appropriate select clause value fragment. + */ + public String getSelectClauseNullString(int sqlType) { + return "null"; + } + + /** + * Does this dialect support UNION ALL, which is generally a faster + * variant of UNION? + * + * @return True if UNION ALL is supported; false otherwise. + */ + public boolean supportsUnionAll() { + return false; + } + + + // miscellaneous support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + /** + * Create a {@link org.hibernate.sql.JoinFragment} strategy responsible + * for handling this dialect's variations in how joins are handled. + * + * @return This dialect's {@link org.hibernate.sql.JoinFragment} strategy. + */ + public JoinFragment createOuterJoinFragment() { + return new ANSIJoinFragment(); + } + + /** + * Create a {@link org.hibernate.sql.CaseFragment} strategy responsible + * for handling this dialect's variations in how CASE statements are + * handled. + * + * @return This dialect's {@link org.hibernate.sql.CaseFragment} strategy. + */ + public CaseFragment createCaseFragment() { + return new ANSICaseFragment(); + } + + /** + * The fragment used to insert a row without specifying any column values. + * This is not possible on some databases. + * + * @return The appropriate empty values clause. + */ + public String getNoColumnsInsertString() { + return "values ( )"; + } + + /** + * The name of the SQL function that transforms a string to + * lowercase. + * + * @return The dialect-specific lowercase function. + */ + public String getLowercaseFunction() { + return "lower"; + } + + /** + * Meant as a means for end users to affect the select strings being sent + * to the database and perhaps manipulate them in some fashion. + * <p/>
+ * The recommended approach is to instead use + * {@link org.hibernate.Interceptor#onPrepareStatement(String)}. + * + * @param select The select command + * @return The mutated select command, or the same as was passed in. + */ + public String transformSelectString(String select) { + return select; + } + + /** + * What is the maximum length Hibernate can use for generated aliases? + * + * @return The maximum length. + */ + public int getMaxAliasLength() { + return 10; + } + + /** + * The SQL literal value to which this database maps boolean values. + * + * @param bool The boolean value + * @return The appropriate SQL literal. + */ + public String toBooleanValueString(boolean bool) { + return bool ? "1" : "0"; + } + + + // identifier quoting support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * The character specific to this dialect used to begin a quoted identifier. + * + * @return The dialect's specific open quote character. + */ + public char openQuote() { + return '"'; + } + + /** + * The character specific to this dialect used to close a quoted identifier. + * + * @return The dialect's specific close quote character. + */ + public char closeQuote() { + return '"'; + } + + /** + * Apply dialect-specific quoting. + * <p/>
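+ * For example, with the default double-quote characters, quote("`Order`")
+ * yields the identifier Order wrapped in double-quotes, while quote("Order")
+ * is returned unchanged (the names here are purely illustrative).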
+ * By default, the incoming value is checked to see if its first character + * is the back-tick (`). If so, the dialect-specific quoting is applied. + * + * @param column The value to be quoted. + * @return The quoted (or unmodified, if not starting with back-tick) value. + * @see #openQuote() + * @see #closeQuote() + */ + public final String quote(String column) { + if ( column.charAt( 0 ) == '`' ) { + return openQuote() + column.substring( 1, column.length() - 1 ) + closeQuote(); + } + else { + return column; + } + } + + + // DDL support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Does this dialect support the ALTER TABLE syntax? + * + * @return True if we support altering of tables; false otherwise. + */ + public boolean hasAlterTable() { + return true; + } + + /** + * Do we need to drop constraints before dropping tables in this dialect? + * + * @return True if constraints must be dropped prior to dropping + * the table; false otherwise. + */ + public boolean dropConstraints() { + return true; + } + + /** + * Do we need to qualify index names with the schema name? + * + * @return boolean + */ + public boolean qualifyIndexName() { + return true; + } + + /** + * Does this dialect support the UNIQUE column syntax? + * + * @return boolean + */ + public boolean supportsUnique() { + return true; + } + + /** + * Does this dialect support adding Unique constraints via create and alter table? + * @return boolean + */ + public boolean supportsUniqueConstraintInCreateAlterTable() { + return true; + } + + /** + * The syntax used to add a column to a table (optional). + * + * @return The "add column" fragment. + */ + public String getAddColumnString() { + throw new UnsupportedOperationException( "No add column syntax supported by Dialect" ); + } + + public String getDropForeignKeyString() { + return " drop constraint "; + } + + public String getTableTypeString() { + // grrr... for differentiation of mysql storage engines + return ""; + } + + /** + * The syntax used to add a foreign key constraint to a table. + * + * @param constraintName The FK constraint name. + * @param foreignKey The names of the columns comprising the FK + * @param referencedTable The table referenced by the FK + * @param primaryKey The explicit columns in the referencedTable referenced + * by this FK. + * @param referencesPrimaryKey if false, constraint should be + * explicit about which column names the constraint refers to + * + * @return the "add FK" fragment + */ + public String getAddForeignKeyConstraintString( + String constraintName, + String[] foreignKey, + String referencedTable, + String[] primaryKey, + boolean referencesPrimaryKey) { + StringBuffer res = new StringBuffer( 30 ); + + res.append( " add constraint " ) + .append( constraintName ) + .append( " foreign key (" ) + .append( StringHelper.join( ", ", foreignKey ) ) + .append( ") references " ) + .append( referencedTable ); + + if ( !referencesPrimaryKey ) { + res.append( " (" ) + .append( StringHelper.join( ", ", primaryKey ) ) + .append( ')' ); + } + + return res.toString(); + } + + /** + * The syntax used to add a primary key constraint to a table. + * + * @param constraintName The name of the PK constraint. + * @return The "add PK" fragment + */ + public String getAddPrimaryKeyConstraintString(String constraintName) { + return " add constraint " + constraintName + " primary key "; + } + + public boolean hasSelfReferentialForeignKeyBug() { + return false; + } + + /** + * The keyword used to specify a nullable column.
+ * + * @return String + */ + public String getNullColumnString() { + return ""; + } + + public boolean supportsCommentOn() { + return false; + } + + public String getTableComment(String comment) { + return ""; + } + + public String getColumnComment(String comment) { + return ""; + } + + public boolean supportsIfExistsBeforeTableName() { + return false; + } + + public boolean supportsIfExistsAfterTableName() { + return false; + } + + /** + * Does this dialect support column-level check constraints? + * + * @return True if column-level CHECK constraints are supported; false + * otherwise. + */ + public boolean supportsColumnCheck() { + return true; + } + + /** + * Does this dialect support table-level check constraints? + * + * @return True if table-level CHECK constraints are supported; false + * otherwise. + */ + public boolean supportsTableCheck() { + return true; + } + + public boolean supportsCascadeDelete() { + return true; + } + + public boolean supportsNotNullUnique() { + return true; + } + + /** + * Completely optional cascading drop clause + * + * @return String + */ + public String getCascadeConstraintsString() { + return ""; + } + + + // Informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Does this dialect support empty IN lists? + *
<p/>
+ * For example, is [where XYZ in ()] a supported construct? + * + * @return True if empty in lists are supported; false otherwise. + * @since 3.2 + */ + public boolean supportsEmptyInList() { + return true; + } + + /** + * Are string comparisons implicitly case insensitive? + * <p/>
    + * In other words, does [where 'XYZ' = 'xyz'] resolve to true? + * + * @return True if comparisons are case insensitive. + * @since 3.2 + */ + public boolean areStringComparisonsCaseInsensitive() { + return false; + } + + /** + * Is this dialect known to support what ANSI-SQL terms "row value + * constructor" syntax; sometimes called tuple syntax. + *
<p/>
    + * Basically, does it support syntax like + * "... where (FIRST_NAME, LAST_NAME) = ('Steve', 'Ebersole') ...". + * + * @return True if this SQL dialect is known to support "row value + * constructor" syntax; false otherwise. + * @since 3.2 + */ + public boolean supportsRowValueConstructorSyntax() { + // return false here, as most databases do not properly support this construct... + return false; + } + + /** + * If the dialect supports {@link #supportsRowValueConstructorSyntax() row values}, + * does it offer such support in IN lists as well? + *
<p/>
    + * For example, "... where (FIRST_NAME, LAST_NAME) IN ( (?, ?), (?, ?) ) ..." + * + * @return True if this SQL dialect is known to support "row value + * constructor" syntax in the IN list; false otherwise. + * @since 3.2 + */ + public boolean supportsRowValueConstructorSyntaxInInList() { + return false; + } + + /** + * Should LOBs (both BLOB and CLOB) be bound using stream operations (i.e. + * {@link java.sql.PreparedStatement#setBinaryStream}). + * + * @return True if BLOBs and CLOBs should be bound using stream operations. + * @since 3.2 + */ + public boolean useInputStreamToInsertBlob() { + return true; + } + + /** + * Does this dialect support parameters within the select clause of + * INSERT ... SELECT ... statements? + * + * @return True if this is supported; false otherwise. + * @since 3.2 + */ + public boolean supportsParametersInInsertSelect() { + return true; + } + + /** + * Does this dialect support asking the result set its positioning + * information on forward only cursors. Specifically, in the case of + * scrolling fetches, Hibernate needs to use + * {@link java.sql.ResultSet#isAfterLast} and + * {@link java.sql.ResultSet#isBeforeFirst}. Certain drivers do not + * allow access to these methods for forward only cursors. + *
<p/>
    + * NOTE : this is highly driver dependent! + * + * @return True if methods like {@link java.sql.ResultSet#isAfterLast} and + * {@link java.sql.ResultSet#isBeforeFirst} are supported for forward + * only cursors; false otherwise. + * @since 3.2 + */ + public boolean supportsResultSetPositionQueryMethodsOnForwardOnlyCursor() { + return true; + } + + /** + * Does this dialect support definition of cascade delete constraints + * which can cause circular chains? + * + * @return True if circular cascade delete constraints are supported; false + * otherwise. + * @since 3.2 + */ + public boolean supportsCircularCascadeDeleteConstraints() { + return true; + } + + /** + * Are subselects supported as the left-hand-side (LHS) of + * IN-predicates. + *
<p/>
+ * In other words, is syntax like "... <subquery> IN (1, 2, 3) ..." supported? + * + * @return True if subselects can appear as the LHS of an in-predicate; + * false otherwise. + * @since 3.2 + */ + public boolean supportsSubselectAsInPredicateLHS() { + return true; + } + + /** + * Expected LOB usage pattern is such that I can perform an insert + * via prepared statement with a parameter binding for a LOB value + * without crazy casting to JDBC driver implementation-specific classes... + * <p/>
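+ * Concretely, the "expected pattern" referred to here is plain JDBC
+ * parameter binding (an illustration; the statement and variables are
+ * hypothetical):
+ * <pre>
+ * PreparedStatement ps = connection.prepareStatement( "insert into DOCUMENT (ID, CONTENT) values (?, ?)" );
+ * ps.setLong( 1, id );
+ * ps.setBinaryStream( 2, contentStream, contentLength );
+ * ps.executeUpdate();
+ * </pre>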
+ * Part of the trickiness here is the fact that this is largely + * driver dependent. For example, Oracle (which is notoriously bad with + * LOB support in their drivers historically) actually does a pretty good + * job with LOB support as of the 10.2.x versions of their drivers... + * + * @return True if normal LOB usage patterns can be used with this driver; + * false if driver-specific hookiness needs to be applied. + * @since 3.2 + */ + public boolean supportsExpectedLobUsagePattern() { + return true; + } + + /** + * Does the dialect support propagating changes to LOB + * values back to the database? Talking about mutating the + * internal value of the locator as opposed to supplying a new + * locator instance... + * <p/>
    + * For BLOBs, the internal value might be changed by: + * {@link java.sql.Blob#setBinaryStream}, + * {@link java.sql.Blob#setBytes(long, byte[])}, + * {@link java.sql.Blob#setBytes(long, byte[], int, int)}, + * or {@link java.sql.Blob#truncate(long)}. + *
<p/>
    + * For CLOBs, the internal value might be changed by: + * {@link java.sql.Clob#setAsciiStream(long)}, + * {@link java.sql.Clob#setCharacterStream(long)}, + * {@link java.sql.Clob#setString(long, String)}, + * {@link java.sql.Clob#setString(long, String, int, int)}, + * or {@link java.sql.Clob#truncate(long)}. + *
<p/>
+ * NOTE : I do not know the correct answer currently for + * databases which (1) are not part of the cruise control process + * or (2) do not {@link #supportsExpectedLobUsagePattern}. + * + * @return True if the changes are propagated back to the + * database; false otherwise. + * @since 3.2 + */ + public boolean supportsLobValueChangePropogation() { + return true; + } + + /** + * Is it supported to materialize a LOB locator outside the transaction in + * which it was created? + * <p/>
    + * Again, part of the trickiness here is the fact that this is largely + * driver dependent. + *
<p/>
+ * NOTE: all databases I have tested which {@link #supportsExpectedLobUsagePattern()} + * also support the ability to materialize a LOB outside the owning transaction... + * + * @return True if unbounded materialization is supported; false otherwise. + * @since 3.2 + */ + public boolean supportsUnboundedLobLocatorMaterialization() { + return true; + } + + /** + * Does this dialect support referencing the table being mutated in + * a subquery? The "table being mutated" is the table referenced in + * an UPDATE or a DELETE query. Can that table then be + * referenced in a subquery of said UPDATE/DELETE query? + * <p/>
+ * For example, would the following two syntaxes be supported:<ul> + * <li>delete from TABLE_A where ID not in ( select ID from TABLE_A )</li> + * <li>update TABLE_A set NON_ID = 'something' where ID in ( select ID from TABLE_A )</li> + * </ul>
+ * + * @return True if this dialect allows referencing the mutating table from + * a subquery. + * @since 3.2 + */ + public boolean supportsSubqueryOnMutatingTable() { + return true; + } + + /** + * Does the dialect support an exists statement in the select clause? + * + * @return True if exists checks are allowed in the select clause; false otherwise. + */ + public boolean supportsExistsInSelect() { + return true; + } + + /** + * For the underlying database, is READ_COMMITTED isolation implemented by + * forcing readers to wait for write locks to be released? + * + * @return True if writers block readers to achieve READ_COMMITTED; false otherwise. + */ + public boolean doesReadCommittedCauseWritersToBlockReaders() { + return false; + } + + /** + * For the underlying database, is REPEATABLE_READ isolation implemented by + * forcing writers to wait for read locks to be released? + * + * @return True if readers block writers to achieve REPEATABLE_READ; false otherwise. + */ + public boolean doesRepeatableReadCauseReadersToBlockWriters() { + return false; + } + + /** + * Does this dialect support using a JDBC bind parameter as an argument + * to a function or procedure call? + * + * @return True if the database supports accepting bind params as args; false otherwise. + */ + public boolean supportsBindAsCallableArgument() { + return true; + } +} diff --git a/src/org/hibernate/dialect/DialectFactory.java b/src/org/hibernate/dialect/DialectFactory.java new file mode 100644 index 0000000000..a79da427d0 --- /dev/null +++ b/src/org/hibernate/dialect/DialectFactory.java @@ -0,0 +1,141 @@ +// $Id$ +package org.hibernate.dialect; + +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.util.ReflectHelper; + +/** + * A factory for generating Dialect instances. + * + * @author Steve Ebersole + */ +public class DialectFactory { + + /** + * Builds an appropriate Dialect instance. + * <p/>
+ * If a dialect is explicitly named in the incoming properties, it is used. Otherwise, the database name and version + * (obtained from connection metadata) are used to make the determination. + * <p/>
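+ * For example (illustrative values only):
+ * <pre>
+ * Properties props = new Properties();
+ * props.setProperty( Environment.DIALECT, "org.hibernate.dialect.PostgreSQLDialect" );
+ * Dialect dialect = DialectFactory.buildDialect( props, "PostgreSQL", 8 );
+ * </pre>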
    + * An exception is thrown if a dialect was not explicitly set and the database name is not known. + * + * @param props The configuration properties. + * @param databaseName The name of the database product (obtained from metadata). + * @param databaseMajorVersion The major version of the database product (obtained from metadata). + * + * @return The appropriate dialect. + * + * @throws HibernateException No dialect specified and database name not known. + */ + public static Dialect buildDialect(Properties props, String databaseName, int databaseMajorVersion) + throws HibernateException { + String dialectName = props.getProperty( Environment.DIALECT ); + if ( dialectName == null ) { + return determineDialect( databaseName, databaseMajorVersion ); + } + else { + return buildDialect( dialectName ); + } + } + + /** + * Determine the appropriate Dialect to use given the database product name + * and major version. + * + * @param databaseName The name of the database product (obtained from metadata). + * @param databaseMajorVersion The major version of the database product (obtained from metadata). + * + * @return An appropriate dialect instance. + */ + public static Dialect determineDialect(String databaseName, int databaseMajorVersion) { + if ( databaseName == null ) { + throw new HibernateException( "Hibernate Dialect must be explicitly set" ); + } + + DatabaseDialectMapper mapper = ( DatabaseDialectMapper ) MAPPERS.get( databaseName ); + if ( mapper == null ) { + throw new HibernateException( "Hibernate Dialect must be explicitly set for database: " + databaseName ); + } + + String dialectName = mapper.getDialectClass( databaseMajorVersion ); + return buildDialect( dialectName ); + } + + /** + * Returns a dialect instance given the name of the class to use. + * + * @param dialectName The name of the dialect class. + * + * @return The dialect instance. + */ + public static Dialect buildDialect(String dialectName) { + try { + return ( Dialect ) ReflectHelper.classForName( dialectName ).newInstance(); + } + catch ( ClassNotFoundException cnfe ) { + throw new HibernateException( "Dialect class not found: " + dialectName ); + } + catch ( Exception e ) { + throw new HibernateException( "Could not instantiate dialect class", e ); + } + } + + /** + * For a given database product name, instances of + * DatabaseDialectMapper know which Dialect to use for different versions. + */ + public static interface DatabaseDialectMapper { + public String getDialectClass(int majorVersion); + } + + /** + * A simple DatabaseDialectMapper for dialects which are independent + * of the underlying database product version. 
+ */ + public static class VersionInsensitiveMapper implements DatabaseDialectMapper { + private String dialectClassName; + + public VersionInsensitiveMapper(String dialectClassName) { + this.dialectClassName = dialectClassName; + } + + public String getDialectClass(int majorVersion) { + return dialectClassName; + } + } + + private static final Map MAPPERS = new HashMap(); + static { + // TODO : this is the stuff it'd be nice to move to a properties file or some other easily user-editable place + MAPPERS.put( "HSQL Database Engine", new VersionInsensitiveMapper( "org.hibernate.dialect.HSQLDialect" ) ); + MAPPERS.put( "DB2/NT", new VersionInsensitiveMapper( "org.hibernate.dialect.DB2Dialect" ) ); + MAPPERS.put( "DB2/LINUX", new VersionInsensitiveMapper( "org.hibernate.dialect.DB2Dialect" ) ); + MAPPERS.put( "MySQL", new VersionInsensitiveMapper( "org.hibernate.dialect.MySQLDialect" ) ); + MAPPERS.put( "PostgreSQL", new VersionInsensitiveMapper( "org.hibernate.dialect.PostgreSQLDialect" ) ); + MAPPERS.put( "Microsoft SQL Server Database", new VersionInsensitiveMapper( "org.hibernate.dialect.SQLServerDialect" ) ); + MAPPERS.put( "Microsoft SQL Server", new VersionInsensitiveMapper( "org.hibernate.dialect.SQLServerDialect" ) ); + MAPPERS.put( "Sybase SQL Server", new VersionInsensitiveMapper( "org.hibernate.dialect.SybaseDialect" ) ); + MAPPERS.put( "Adaptive Server Enterprise", new VersionInsensitiveMapper( "org.hibernate.dialect.SybaseDialect" ) ); + MAPPERS.put( "Informix Dynamic Server", new VersionInsensitiveMapper( "org.hibernate.dialect.InformixDialect" ) ); + MAPPERS.put( "Apache Derby", new VersionInsensitiveMapper( "org.hibernate.dialect.DerbyDialect" ) ); + + MAPPERS.put( + "Oracle", + new DatabaseDialectMapper() { + public String getDialectClass(int majorVersion) { + switch ( majorVersion ) { + case 8: return Oracle8iDialect.class.getName(); + case 9: return Oracle9iDialect.class.getName(); + case 10: return Oracle10gDialect.class.getName(); + default: throw new HibernateException( "unknown Oracle major version [" + majorVersion + "]" ); + } + } + } + ); + } +} diff --git a/src/org/hibernate/dialect/FirebirdDialect.java b/src/org/hibernate/dialect/FirebirdDialect.java new file mode 100644 index 0000000000..e23e4fa007 --- /dev/null +++ b/src/org/hibernate/dialect/FirebirdDialect.java @@ -0,0 +1,29 @@ +//$Id$ +package org.hibernate.dialect; + +/** + * An SQL dialect for Firebird. + * @author Reha CENANI + */ +public class FirebirdDialect extends InterbaseDialect { + + public String getDropSequenceString(String sequenceName) { + return "drop generator " + sequenceName; + } + + public String getLimitString(String sql, boolean hasOffset) { + return new StringBuffer( sql.length()+20 ) + .append(sql) + .insert(6, hasOffset ? " first ? skip ?" 
: " first ?") + .toString(); + } + + public boolean bindLimitParametersFirst() { + return true; + } + + public boolean bindLimitParametersInReverseOrder() { + return true; + } + +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/FrontBaseDialect.java b/src/org/hibernate/dialect/FrontBaseDialect.java new file mode 100644 index 0000000000..c04a2495a7 --- /dev/null +++ b/src/org/hibernate/dialect/FrontBaseDialect.java @@ -0,0 +1,91 @@ +//$Id$ +package org.hibernate.dialect; + +import org.hibernate.dialect.lock.LockingStrategy; +import org.hibernate.dialect.lock.UpdateLockingStrategy; +import org.hibernate.dialect.lock.SelectLockingStrategy; +import org.hibernate.persister.entity.Lockable; +import org.hibernate.LockMode; + +import java.sql.Types; + +/** + * An SQL Dialect for Frontbase. Assumes you're using the latest version + * of the FrontBase JDBC driver, available from http://frontbase.com/ + *

    + * NOTE: The latest JDBC driver is not always included with the + * latest release of FrontBase. Download the driver separately, and enjoy + * the informative release notes. + *
<p/>
+ * This dialect was tested with JDBC driver version 2.3.1. This driver + * contains a bug that causes batches of updates to fail. (The bug should be + * fixed in the next release of the JDBC driver.) If you are using JDBC driver + * 2.3.1, you can work around this problem by setting the following in your + * hibernate.properties file: hibernate.jdbc.batch_size=15 + * + * @author Ron Lussier rlussier@lenscraft.com + */ +public class FrontBaseDialect extends Dialect { + + public FrontBaseDialect() { + super(); + + registerColumnType( Types.BIT, "bit" ); + registerColumnType( Types.BIGINT, "longint" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "tinyint" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double precision" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, "bit varying($l)" ); + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + registerColumnType( Types.BLOB, "blob" ); + registerColumnType( Types.CLOB, "clob" ); + } + + public String getAddColumnString() { + return "add column"; + } + + public String getCascadeConstraintsString() { + return " cascade"; + } + + public boolean dropConstraints() { + return false; + } + + /** + * Does this dialect support the FOR UPDATE syntax? No! + * + * @return false always. FrontBase doesn't support this syntax, + * which was dropped with SQL92 + */ + public String getForUpdateString() { + return ""; + } + + public String getCurrentTimestampCallString() { + // TODO : not sure this is correct, could not find docs on how to do this. + return "{?= call current_timestamp}"; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return true; + } + + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + // FrontBase has no known variation of a "SELECT ... FOR UPDATE" syntax... + if ( lockMode.greaterThan( LockMode.READ ) ) { + return new UpdateLockingStrategy( lockable, lockMode ); + } + else { + return new SelectLockingStrategy( lockable, lockMode ); + } + } +} diff --git a/src/org/hibernate/dialect/H2Dialect.java b/src/org/hibernate/dialect/H2Dialect.java new file mode 100644 index 0000000000..496a916575 --- /dev/null +++ b/src/org/hibernate/dialect/H2Dialect.java @@ -0,0 +1,301 @@ +package org.hibernate.dialect; + +import java.sql.SQLException; +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.MappingException; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter; +import org.hibernate.exception.ViolatedConstraintNameExtracter; +import org.hibernate.util.ReflectHelper; + +/** + * A dialect compatible with the H2 database.
+ * + * @author Thomas Mueller + * + */ +public class H2Dialect extends Dialect { + + private String querySequenceString; + public H2Dialect() { + super(); + + querySequenceString = "select sequence_name from information_schema.sequences"; + try { + // HHH-2300 + Class constants = ReflectHelper.classForName( "org.h2.engine.Constants" ); + Integer build = (Integer)constants.getDeclaredField("BUILD_ID" ).get(null); + int buildid = build.intValue(); + if(buildid < 32) { + querySequenceString = "select name from information_schema.sequences"; + } + } catch(Throwable e) { + // ignore (probably H2 not in the classpath) + } + registerColumnType(Types.BOOLEAN, "boolean"); + registerColumnType(Types.BIGINT, "bigint"); + registerColumnType(Types.BINARY, "binary"); + registerColumnType(Types.BIT, "bit"); + registerColumnType(Types.CHAR, "char($l)"); + registerColumnType(Types.DATE, "date"); + registerColumnType(Types.DECIMAL, "decimal($p,$s)"); + registerColumnType(Types.DOUBLE, "double"); + registerColumnType(Types.FLOAT, "float"); + registerColumnType(Types.INTEGER, "integer"); + registerColumnType(Types.LONGVARBINARY, "longvarbinary"); + registerColumnType(Types.LONGVARCHAR, "longvarchar"); + registerColumnType(Types.REAL, "real"); + registerColumnType(Types.SMALLINT, "smallint"); + registerColumnType(Types.TINYINT, "tinyint"); + registerColumnType(Types.TIME, "time"); + registerColumnType(Types.TIMESTAMP, "timestamp"); + registerColumnType(Types.VARCHAR, "varchar($l)"); + registerColumnType(Types.VARBINARY, "binary($l)"); + registerColumnType(Types.NUMERIC, "numeric"); + registerColumnType(Types.BLOB, "blob"); + registerColumnType(Types.CLOB, "clob"); + + // select topic, syntax from information_schema.help + // where section like 'Function%' order by section, topic + +// registerFunction("abs", new StandardSQLFunction("abs")); + registerFunction("acos", new StandardSQLFunction("acos", Hibernate.DOUBLE)); + registerFunction("asin", new StandardSQLFunction("asin", Hibernate.DOUBLE)); + registerFunction("atan", new StandardSQLFunction("atan", Hibernate.DOUBLE)); + registerFunction("atan2", new StandardSQLFunction("atan2", Hibernate.DOUBLE)); + registerFunction("bitand", new StandardSQLFunction("bitand", Hibernate.INTEGER)); + registerFunction("bitor", new StandardSQLFunction("bitor", Hibernate.INTEGER)); + registerFunction("bitxor", new StandardSQLFunction("bitxor", Hibernate.INTEGER)); + registerFunction("ceiling", new StandardSQLFunction("ceiling", Hibernate.DOUBLE)); + registerFunction("cos", new StandardSQLFunction("cos", Hibernate.DOUBLE)); + registerFunction("cot", new StandardSQLFunction("cot", Hibernate.DOUBLE)); + registerFunction("degrees", new StandardSQLFunction("degrees", Hibernate.DOUBLE)); + registerFunction("exp", new StandardSQLFunction("exp", Hibernate.DOUBLE)); + registerFunction("floor", new StandardSQLFunction("floor", Hibernate.DOUBLE)); + registerFunction("log", new StandardSQLFunction("log", Hibernate.DOUBLE)); + registerFunction("log10", new StandardSQLFunction("log10", Hibernate.DOUBLE)); +// registerFunction("mod", new StandardSQLFunction("mod", Hibernate.INTEGER)); + registerFunction("pi", new NoArgSQLFunction("pi", Hibernate.DOUBLE)); + registerFunction("power", new StandardSQLFunction("power", Hibernate.DOUBLE)); + registerFunction("radians", new StandardSQLFunction("radians", Hibernate.DOUBLE)); + registerFunction("rand", new NoArgSQLFunction("rand", Hibernate.DOUBLE)); + registerFunction("round", new StandardSQLFunction("round", Hibernate.DOUBLE)); + 
registerFunction("roundmagic", new StandardSQLFunction("roundmagic", Hibernate.DOUBLE)); + registerFunction("sign", new StandardSQLFunction("sign", Hibernate.INTEGER)); + registerFunction("sin", new StandardSQLFunction("sin", Hibernate.DOUBLE)); +// registerFunction("sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE)); + registerFunction("tan", new StandardSQLFunction("tan", Hibernate.DOUBLE)); + registerFunction("truncate", new StandardSQLFunction("truncate", Hibernate.DOUBLE)); + + registerFunction("compress", new StandardSQLFunction("compress", Hibernate.BINARY)); + registerFunction("expand", new StandardSQLFunction("compress", Hibernate.BINARY)); + registerFunction("decrypt", new StandardSQLFunction("decrypt", Hibernate.BINARY)); + registerFunction("encrypt", new StandardSQLFunction("encrypt", Hibernate.BINARY)); + registerFunction("hash", new StandardSQLFunction("hash", Hibernate.BINARY)); + + registerFunction("ascii", new StandardSQLFunction("ascii", Hibernate.INTEGER)); +// registerFunction("bit_length", new StandardSQLFunction("bit_length", Hibernate.INTEGER)); + registerFunction("char", new StandardSQLFunction("char", Hibernate.CHARACTER)); + registerFunction("concat", new VarArgsSQLFunction(Hibernate.STRING, "(", "||", ")")); + registerFunction("difference", new StandardSQLFunction("difference", Hibernate.INTEGER)); + registerFunction("hextoraw", new StandardSQLFunction("hextoraw", Hibernate.STRING)); + registerFunction("lower", new StandardSQLFunction("lower", Hibernate.STRING)); + registerFunction("insert", new StandardSQLFunction("lower", Hibernate.STRING)); + registerFunction("left", new StandardSQLFunction("left", Hibernate.STRING)); +// registerFunction("length", new StandardSQLFunction("length", Hibernate.INTEGER)); +// registerFunction("locate", new StandardSQLFunction("locate", Hibernate.INTEGER)); +// registerFunction("lower", new StandardSQLFunction("lower", Hibernate.STRING)); + registerFunction("lcase", new StandardSQLFunction("lcase", Hibernate.STRING)); + registerFunction("ltrim", new StandardSQLFunction("ltrim", Hibernate.STRING)); + registerFunction("octet_length", new StandardSQLFunction("octet_length", Hibernate.INTEGER)); + registerFunction("position", new StandardSQLFunction("position", Hibernate.INTEGER)); + registerFunction("rawtohex", new StandardSQLFunction("rawtohex", Hibernate.STRING)); + registerFunction("repeat", new StandardSQLFunction("repeat", Hibernate.STRING)); + registerFunction("replace", new StandardSQLFunction("replace", Hibernate.STRING)); + registerFunction("right", new StandardSQLFunction("right", Hibernate.STRING)); + registerFunction("rtrim", new StandardSQLFunction("rtrim", Hibernate.STRING)); + registerFunction("soundex", new StandardSQLFunction("soundex", Hibernate.STRING)); + registerFunction("space", new StandardSQLFunction("space", Hibernate.STRING)); + registerFunction("stringencode", new StandardSQLFunction("stringencode", Hibernate.STRING)); + registerFunction("stringdecode", new StandardSQLFunction("stringdecode", Hibernate.STRING)); +// registerFunction("substring", new StandardSQLFunction("substring", Hibernate.STRING)); +// registerFunction("upper", new StandardSQLFunction("upper", Hibernate.STRING)); + registerFunction("ucase", new StandardSQLFunction("ucase", Hibernate.STRING)); + + registerFunction("stringtoutf8", new StandardSQLFunction("stringtoutf8", Hibernate.BINARY)); + registerFunction("utf8tostring", new StandardSQLFunction("utf8tostring", Hibernate.STRING)); + + registerFunction("current_date", new 
NoArgSQLFunction("current_date", Hibernate.DATE)); + registerFunction("current_time", new NoArgSQLFunction("current_time", Hibernate.TIME)); + registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", Hibernate.TIMESTAMP)); + registerFunction("datediff", new NoArgSQLFunction("datediff", Hibernate.INTEGER)); + registerFunction("dayname", new StandardSQLFunction("dayname", Hibernate.STRING)); + registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth", Hibernate.INTEGER)); + registerFunction("dayofweek", new StandardSQLFunction("dayofweek", Hibernate.INTEGER)); + registerFunction("dayofyear", new StandardSQLFunction("dayofyear", Hibernate.INTEGER)); +// registerFunction("hour", new StandardSQLFunction("hour", Hibernate.INTEGER)); +// registerFunction("minute", new StandardSQLFunction("minute", Hibernate.INTEGER)); +// registerFunction("month", new StandardSQLFunction("month", Hibernate.INTEGER)); + registerFunction("monthname", new StandardSQLFunction("monthname", Hibernate.STRING)); + registerFunction("quater", new StandardSQLFunction("quater", Hibernate.INTEGER)); +// registerFunction("second", new StandardSQLFunction("second", Hibernate.INTEGER)); + registerFunction("week", new StandardSQLFunction("week", Hibernate.INTEGER)); +// registerFunction("year", new StandardSQLFunction("year", Hibernate.INTEGER)); + + registerFunction("curdate", new NoArgSQLFunction("curdate", Hibernate.DATE)); + registerFunction("curtime", new NoArgSQLFunction("curtime", Hibernate.TIME)); + registerFunction("curtimestamp", new NoArgSQLFunction("curtimestamp", Hibernate.TIME)); + registerFunction("now", new NoArgSQLFunction("now", Hibernate.TIMESTAMP)); + + registerFunction("database", new NoArgSQLFunction("database", Hibernate.STRING)); + registerFunction("user", new NoArgSQLFunction("user", Hibernate.STRING)); + + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE); + + } + + public String getAddColumnString() { + return "add column"; + } + + public boolean supportsIdentityColumns() { + return true; + } + + public String getIdentityColumnString() { + return "generated by default as identity"; // not null is implicit + } + + public String getIdentitySelectString() { + return "call identity()"; + } + + public String getIdentityInsertString() { + return "null"; + } + + public String getForUpdateString() { + return " for update"; + } + + public boolean supportsUnique() { + return true; + } + + public boolean supportsLimit() { + return true; + } + + public String getLimitString(String sql, boolean hasOffset) { + return new StringBuffer(sql.length() + 20). + append(sql). + append(hasOffset ? " limit ? offset ?" : " limit ?"). 
+ toString(); + } + + public boolean bindLimitParametersInReverseOrder() { + return true; + } + + public boolean bindLimitParametersFirst() { + return false; + } + + public boolean supportsIfExistsAfterTableName() { + return true; + } + + + public boolean supportsPooledSequences() { + return true; + } + + protected String getCreateSequenceString(String sequenceName) throws MappingException { + return "create sequence " + sequenceName; + } + + + protected String getDropSequenceString(String sequenceName) throws MappingException { + return "drop sequence " + sequenceName; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return "next value for " + sequenceName; + } + + public String getSequenceNextValString(String sequenceName) { + return "call next value for " + sequenceName; + } + + public String getQuerySequencesString() { + return querySequenceString; + } + + public boolean supportsSequences() { + return true; + } + + public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { + return EXTRACTER; + } + + private static ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() { + + /** + * Extract the name of the violated constraint from the given SQLException. + * + * @param sqle The exception that was the result of the constraint violation. + * @return The extracted constraint name. + */ + public String extractConstraintName(SQLException sqle) { + String constraintName = null; + // 23000: Check constraint violation: {0} + // 23001: Unique index or primary key violation: {0} + if(sqle.getSQLState().startsWith("23")) { + String message = sqle.getMessage(); + int idx = message.indexOf("violation: "); + if(idx > 0) { + constraintName = message.substring(idx + "violation: ".length()); + } + } + return constraintName; + } + + }; + + public boolean supportsTemporaryTables() { + return true; + } + + public String getCreateTemporaryTableString() { + return "create temporary table if not exists"; + } + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + public String getCurrentTimestampSelectString() { + return "call current_timestamp()"; + } + + public boolean supportsUnionAll() { + return true; + } + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsLobValueChangePropogation() { + return false; + } +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/HSQLDialect.java b/src/org/hibernate/dialect/HSQLDialect.java new file mode 100644 index 0000000000..860ecd74fc --- /dev/null +++ b/src/org/hibernate/dialect/HSQLDialect.java @@ -0,0 +1,326 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.SQLException; +import java.sql.Types; +import java.io.Serializable; + +import org.hibernate.Hibernate; +import org.hibernate.LockMode; +import org.hibernate.StaleObjectStateException; +import org.hibernate.JDBCException; +import org.hibernate.MappingException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.Lockable; +import org.hibernate.util.ReflectHelper; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.dialect.lock.LockingStrategy; +import org.hibernate.dialect.lock.SelectLockingStrategy; +import 
org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter; +import org.hibernate.exception.ViolatedConstraintNameExtracter; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * An SQL dialect compatible with HSQLDB (Hypersonic SQL). + *

    + * Note this version supports HSQLDB version 1.8 and higher, only. + * + * @author Christoph Sturm, Phillip Baird + */ +public class HSQLDialect extends Dialect { + + private static final Log log = LogFactory.getLog( HSQLDialect.class ); + + private boolean schemaSupport; + + public HSQLDialect() { + super(); + registerColumnType( Types.BIGINT, "bigint" ); + registerColumnType( Types.BINARY, "binary" ); + registerColumnType( Types.BIT, "bit" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.DECIMAL, "decimal" ); + registerColumnType( Types.DOUBLE, "double" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.LONGVARBINARY, "longvarbinary" ); + registerColumnType( Types.LONGVARCHAR, "longvarchar" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "tinyint" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.VARBINARY, "varbinary($l)" ); + registerColumnType( Types.NUMERIC, "numeric" ); + //HSQL has no Blob/Clob support .... but just put these here for now! + registerColumnType( Types.BLOB, "longvarbinary" ); + registerColumnType( Types.CLOB, "longvarchar" ); + + registerFunction( "ascii", new StandardSQLFunction( "ascii", Hibernate.INTEGER ) ); + registerFunction( "char", new StandardSQLFunction( "char", Hibernate.CHARACTER ) ); + registerFunction( "length", new StandardSQLFunction( "length", Hibernate.LONG ) ); + registerFunction( "lower", new StandardSQLFunction( "lower" ) ); + registerFunction( "upper", new StandardSQLFunction( "upper" ) ); + registerFunction( "lcase", new StandardSQLFunction( "lcase" ) ); + registerFunction( "ucase", new StandardSQLFunction( "ucase" ) ); + registerFunction( "soundex", new StandardSQLFunction( "soundex", Hibernate.STRING ) ); + registerFunction( "ltrim", new StandardSQLFunction( "ltrim" ) ); + registerFunction( "rtrim", new StandardSQLFunction( "rtrim" ) ); + registerFunction( "reverse", new StandardSQLFunction( "reverse" ) ); + registerFunction( "space", new StandardSQLFunction( "space", Hibernate.STRING ) ); + registerFunction( "rawtohex", new StandardSQLFunction( "rawtohex" ) ); + registerFunction( "hextoraw", new StandardSQLFunction( "hextoraw" ) ); + + registerFunction( "user", new NoArgSQLFunction( "user", Hibernate.STRING ) ); + registerFunction( "database", new NoArgSQLFunction( "database", Hibernate.STRING ) ); + + registerFunction( "current_date", new NoArgSQLFunction( "current_date", Hibernate.DATE, false ) ); + registerFunction( "curdate", new NoArgSQLFunction( "curdate", Hibernate.DATE ) ); + registerFunction( "current_timestamp", new NoArgSQLFunction( "current_timestamp", Hibernate.TIMESTAMP, false ) ); + registerFunction( "now", new NoArgSQLFunction( "now", Hibernate.TIMESTAMP ) ); + registerFunction( "current_time", new NoArgSQLFunction( "current_time", Hibernate.TIME, false ) ); + registerFunction( "curtime", new NoArgSQLFunction( "curtime", Hibernate.TIME ) ); + registerFunction( "day", new StandardSQLFunction( "day", Hibernate.INTEGER ) ); + registerFunction( "dayofweek", new StandardSQLFunction( "dayofweek", Hibernate.INTEGER ) ); + registerFunction( "dayofyear", new StandardSQLFunction( "dayofyear", Hibernate.INTEGER ) ); + registerFunction( "dayofmonth", new StandardSQLFunction( "dayofmonth", Hibernate.INTEGER ) ); 
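+
+ // Illustrative aside: each registerFunction call in this constructor maps an
+ // HQL function name onto the matching HSQLDB function and gives the result a
+ // Hibernate type. A minimal sketch of the effect, assuming a hypothetical
+ // mapped "Event" entity with a "startDate" property:
+ //
+ //   List events = session
+ //           .createQuery( "from Event e where dayofmonth(e.startDate) = 1" )
+ //           .list();
+ //
+ // renders dayofmonth(...) via the StandardSQLFunction registered above and
+ // types the expression as Hibernate.INTEGER.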
+ registerFunction( "month", new StandardSQLFunction( "month", Hibernate.INTEGER ) ); + registerFunction( "year", new StandardSQLFunction( "year", Hibernate.INTEGER ) ); + registerFunction( "week", new StandardSQLFunction( "week", Hibernate.INTEGER ) ); + registerFunction( "quater", new StandardSQLFunction( "quater", Hibernate.INTEGER ) ); + registerFunction( "hour", new StandardSQLFunction( "hour", Hibernate.INTEGER ) ); + registerFunction( "minute", new StandardSQLFunction( "minute", Hibernate.INTEGER ) ); + registerFunction( "second", new StandardSQLFunction( "second", Hibernate.INTEGER ) ); + registerFunction( "dayname", new StandardSQLFunction( "dayname", Hibernate.STRING ) ); + registerFunction( "monthname", new StandardSQLFunction( "monthname", Hibernate.STRING ) ); + + registerFunction( "abs", new StandardSQLFunction( "abs" ) ); + registerFunction( "sign", new StandardSQLFunction( "sign", Hibernate.INTEGER ) ); + + registerFunction( "acos", new StandardSQLFunction( "acos", Hibernate.DOUBLE ) ); + registerFunction( "asin", new StandardSQLFunction( "asin", Hibernate.DOUBLE ) ); + registerFunction( "atan", new StandardSQLFunction( "atan", Hibernate.DOUBLE ) ); + registerFunction( "cos", new StandardSQLFunction( "cos", Hibernate.DOUBLE ) ); + registerFunction( "cot", new StandardSQLFunction( "cot", Hibernate.DOUBLE ) ); + registerFunction( "exp", new StandardSQLFunction( "exp", Hibernate.DOUBLE ) ); + registerFunction( "log", new StandardSQLFunction( "log", Hibernate.DOUBLE ) ); + registerFunction( "log10", new StandardSQLFunction( "log10", Hibernate.DOUBLE ) ); + registerFunction( "sin", new StandardSQLFunction( "sin", Hibernate.DOUBLE ) ); + registerFunction( "sqrt", new StandardSQLFunction( "sqrt", Hibernate.DOUBLE ) ); + registerFunction( "tan", new StandardSQLFunction( "tan", Hibernate.DOUBLE ) ); + registerFunction( "pi", new NoArgSQLFunction( "pi", Hibernate.DOUBLE ) ); + registerFunction( "rand", new StandardSQLFunction( "rand", Hibernate.FLOAT ) ); + + registerFunction( "radians", new StandardSQLFunction( "radians", Hibernate.DOUBLE ) ); + registerFunction( "degrees", new StandardSQLFunction( "degrees", Hibernate.DOUBLE ) ); + registerFunction( "roundmagic", new StandardSQLFunction( "roundmagic" ) ); + + registerFunction( "ceiling", new StandardSQLFunction( "ceiling" ) ); + registerFunction( "floor", new StandardSQLFunction( "floor" ) ); + + // Multi-param dialect functions... + registerFunction( "mod", new StandardSQLFunction( "mod", Hibernate.INTEGER ) ); + + // function templates + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(", "||", ")" ) ); + + getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); + + try { + // Does present HSQLDB Database class support schemas? + // yuck! Perhaps we should think about a new dialect? 
Especially + // if more things are going to break back-compat moving forward + ReflectHelper.classForName( "org.hsqldb.Database" ).getDeclaredField( "schemaManager" ); + schemaSupport = true; + } + catch (Throwable t) { + schemaSupport = false; + } + } + + public String getAddColumnString() { + return "add column"; + } + + public boolean supportsIdentityColumns() { + return true; + } + + public String getIdentityColumnString() { + return "generated by default as identity (start with 1)"; //not null is implicit + } + + public String getIdentitySelectString() { + return "call identity()"; + } + + public String getIdentityInsertString() { + return "null"; + } + + public String getForUpdateString() { + return ""; + } + + public boolean supportsUnique() { + return false; + } + + public boolean supportsLimit() { + return true; + } + + public String getLimitString(String sql, boolean hasOffset) { + return new StringBuffer( sql.length() + 10 ) + .append( sql ) + .insert( sql.toLowerCase().indexOf( "select" ) + 6, hasOffset ? " limit ? ?" : " top ?" ) + .toString(); + } + + public boolean bindLimitParametersFirst() { + return true; + } + + public boolean supportsIfExistsAfterTableName() { + return true; + } + + public boolean supportsColumnCheck() { + return false; + } + + public boolean supportsSequences() { + return true; + } + + public boolean supportsPooledSequences() { + return true; + } + + public String[] getCreateSequenceStrings(String sequenceName) { + return getCreateSequenceStrings( sequenceName, 1, 1 ); + } + + public String[] getCreateSequenceStrings(String sequenceName, int initialValue, int incrementSize) { + return new String[] { + "create table dual_" + sequenceName + " (zero integer)", + "insert into dual_" + sequenceName + " values (0)", + "create sequence " + sequenceName + " start with " + initialValue + " increment by " + incrementSize + }; + } + + public String[] getDropSequenceStrings(String sequenceName) { + return new String[] { + "drop table dual_" + sequenceName + " if exists", + "drop sequence " + sequenceName + }; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return "next value for " + sequenceName; + } + + public String getSequenceNextValString(String sequenceName) { + return "select next value for " + sequenceName + " from dual_" + sequenceName; + } + + public String getQuerySequencesString() { + if ( schemaSupport ) { + return "select sequence_name from information_schema.system_sequences"; + } + else { + return "select sequence_name from system_sequences"; + } + } + + public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { + return EXTRACTER; + } + + private static ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() { + + /** + * Extract the name of the violated constraint from the given SQLException. + * + * @param sqle The exception that was the result of the constraint violation. + * @return The extracted constraint name. 
+ */ + public String extractConstraintName(SQLException sqle) { + String constraintName = null; + + int errorCode = JDBCExceptionHelper.extractErrorCode( sqle ); + + if ( errorCode == -8 ) { + constraintName = extractUsingTemplate( + "Integrity constraint violation ", " table:", sqle.getMessage() + ); + } + else if ( errorCode == -9 ) { + constraintName = extractUsingTemplate( + "Violation of unique index: ", " in statement [", sqle.getMessage() + ); + } + else if ( errorCode == -104 ) { + constraintName = extractUsingTemplate( + "Unique constraint violation: ", " in statement [", sqle.getMessage() + ); + } + else if ( errorCode == -177 ) { + constraintName = extractUsingTemplate( + "Integrity constraint violation - no parent ", " table:", sqle.getMessage() + ); + } + + return constraintName; + } + + }; + + /** + * HSQL does not really support temp tables; just take advantage of the + * fact that it is a single user db... + */ + public boolean supportsTemporaryTables() { + return true; + } + + public boolean supportsCurrentTimestampSelection() { + return false; + } + + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + // HSQLDB only supports READ_UNCOMMITTED transaction isolation + return new ReadUncommittedLockingStrategy( lockable, lockMode ); + } + + public static class ReadUncommittedLockingStrategy extends SelectLockingStrategy { + public ReadUncommittedLockingStrategy(Lockable lockable, LockMode lockMode) { + super( lockable, lockMode ); + } + + public void lock(Serializable id, Object version, Object object, SessionImplementor session) + throws StaleObjectStateException, JDBCException { + if ( getLockMode().greaterThan( LockMode.READ ) ) { + log.warn( "HSQLDB supports only READ_UNCOMMITTED isolation" ); + } + super.lock( id, version, object, session ); + } + } + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } + + public boolean supportsLobValueChangePropogation() { + return false; + } +} diff --git a/src/org/hibernate/dialect/InformixDialect.java b/src/org/hibernate/dialect/InformixDialect.java new file mode 100644 index 0000000000..d8b5aa72d2 --- /dev/null +++ b/src/org/hibernate/dialect/InformixDialect.java @@ -0,0 +1,213 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.SQLException; +import java.sql.Types; + +import org.hibernate.MappingException; +import org.hibernate.Hibernate; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter; +import org.hibernate.exception.ViolatedConstraintNameExtracter; +import org.hibernate.util.StringHelper; + +/** + * Informix dialect.
    + *
    + * Seems to work with Informix Dynamic Server Version 7.31.UD3, + * Informix JDBC driver version 2.21JC3. + * @author Steve Molitor + */ +public class InformixDialect extends Dialect { + + /** + * Creates new InformixDialect instance. Sets up the JDBC / + * Informix type mappings. + */ + public InformixDialect() { + super(); + + registerColumnType(Types.BIGINT, "int8"); + registerColumnType(Types.BINARY, "byte"); + registerColumnType(Types.BIT, "smallint"); // Informix doesn't have a bit type + registerColumnType(Types.CHAR, "char($l)"); + registerColumnType(Types.DATE, "date"); + registerColumnType(Types.DECIMAL, "decimal"); + registerColumnType(Types.DOUBLE, "float"); + registerColumnType(Types.FLOAT, "smallfloat"); + registerColumnType(Types.INTEGER, "integer"); + registerColumnType(Types.LONGVARBINARY, "blob"); // or BYTE + registerColumnType(Types.LONGVARCHAR, "clob"); // or TEXT? + registerColumnType(Types.NUMERIC, "decimal"); // or MONEY + registerColumnType(Types.REAL, "smallfloat"); + registerColumnType(Types.SMALLINT, "smallint"); + registerColumnType(Types.TIMESTAMP, "datetime year to fraction(5)"); + registerColumnType(Types.TIME, "datetime hour to second"); + registerColumnType(Types.TINYINT, "smallint"); + registerColumnType(Types.VARBINARY, "byte"); + registerColumnType(Types.VARCHAR, "varchar($l)"); + registerColumnType(Types.VARCHAR, 255, "varchar($l)"); + registerColumnType(Types.VARCHAR, 32739, "lvarchar($l)"); + + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(", "||", ")" ) ); + } + + public String getAddColumnString() { + return "add"; + } + + public boolean supportsIdentityColumns() { + return true; + } + + public String getIdentitySelectString(String table, String column, int type) + throws MappingException { + return type==Types.BIGINT ? + "select dbinfo('serial8') from systables where tabid=1" : + "select dbinfo('sqlca.sqlerrd1') from systables where tabid=1"; + } + + public String getIdentityColumnString(int type) throws MappingException { + return type==Types.BIGINT ? + "serial8 not null" : + "serial not null"; + } + + public boolean hasDataTypeInIdentityColumn() { + return false; + } + + /** + * The syntax used to add a foreign key constraint to a table. + * Informix constraint name must be at the end. + * @return String + */ + public String getAddForeignKeyConstraintString( + String constraintName, + String[] foreignKey, + String referencedTable, + String[] primaryKey, boolean referencesPrimaryKey + ) { + StringBuffer result = new StringBuffer(30); + + result.append(" add constraint ") + .append(" foreign key (") + .append( StringHelper.join(", ", foreignKey) ) + .append(") references ") + .append(referencedTable); + + if(!referencesPrimaryKey) { + result.append(" (") + .append( StringHelper.join(", ", primaryKey) ) + .append(')'); + } + + result.append(" constraint ").append(constraintName); + + return result.toString(); + } + + /** + * The syntax used to add a primary key constraint to a table. + * Informix constraint name must be at the end. 
+ * @return String + */ + public String getAddPrimaryKeyConstraintString(String constraintName) { + return " add constraint primary key constraint " + constraintName + " "; + } + + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; + } + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName + " restrict"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select " + getSelectSequenceNextValString( sequenceName ) + " from systables where tabid=1"; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return sequenceName + ".nextval"; + } + + public boolean supportsSequences() { + return true; + } + + public boolean supportsLimit() { + return true; + } + + public boolean useMaxForLimit() { + return true; + } + + public boolean supportsLimitOffset() { + return false; + } + + public String getLimitString(String querySelect, int offset, int limit) { + if (offset>0) throw new UnsupportedOperationException("informix has no offset"); + return new StringBuffer( querySelect.length()+8 ) + .append(querySelect) + .insert( querySelect.toLowerCase().indexOf( "select" ) + 6, " first " + limit ) + .toString(); + } + + public boolean supportsVariableLimit() { + return false; + } + + public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { + return EXTRACTER; + } + + private static ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() { + + /** + * Extract the name of the violated constraint from the given SQLException. + * + * @param sqle The exception that was the result of the constraint violation. + * @return The extracted constraint name. + */ + public String extractConstraintName(SQLException sqle) { + String constraintName = null; + + int errorCode = JDBCExceptionHelper.extractErrorCode(sqle); + if ( errorCode == -268 ) { + constraintName = extractUsingTemplate( "Unique constraint (", ") violated.", sqle.getMessage() ); + } + else if ( errorCode == -691 ) { + constraintName = extractUsingTemplate( "Missing key in referenced table for referential constraint (", ").", sqle.getMessage() ); + } + else if ( errorCode == -692 ) { + constraintName = extractUsingTemplate( "Key value for constraint (", ") is still being referenced.", sqle.getMessage() ); + } + + if (constraintName != null) { + // strip table-owner because Informix always returns constraint names as "<constraintowner>.<constraintname>"
+ int i = constraintName.indexOf('.'); + if (i != -1) { + constraintName = constraintName.substring(i + 1); + } + } + + return constraintName; + } + + }; + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + public String getCurrentTimestampSelectString() { + return "select distinct current timestamp from informix.systables"; + } +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/IngresDialect.java b/src/org/hibernate/dialect/IngresDialect.java new file mode 100644 index 0000000000..4a6defb622 --- /dev/null +++ b/src/org/hibernate/dialect/IngresDialect.java @@ -0,0 +1,292 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; + + +/** + * An Ingres SQL dialect. + *

+ * Known limitations: + * - only supports simple constants or columns on the left side of an IN, making (1,2,3) in (...) or (select ...) unusable + */ +public class IngresDialect extends Dialect { + + /** + * Does this dialect support FOR UPDATE OF, allowing + * particular rows to be locked? + * + * @return True (Ingres does support "for update of" syntax...) + */ + public boolean supportsForUpdateOf() { + return true; + } + + /** + * The syntax used to add a column to a table (optional). + */ + public String getAddColumnString() { + return "add column"; + } + + /** + * The keyword used to specify a nullable column. + * + * @return String + */ + public String getNullColumnString() { + return " with null"; + } + + /** + * Does this dialect support sequences? + * + * @return boolean + */ + public boolean supportsSequences() { + return true; + } + + /** + * The syntax that fetches the next value of a sequence, if sequences are supported. + * + * @param sequenceName the name of the sequence + * + * @return String + */ + public String getSequenceNextValString(String sequenceName) { + return "select nextval for " + sequenceName; + } + + /** + * The syntax used to create a sequence, if sequences are supported. + * + * @param sequenceName the name of the sequence + * + * @return String + */ + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; + } + + /** + * The syntax used to drop a sequence, if sequences are supported. + * + * @param sequenceName the name of the sequence + * + * @return String + */ + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName + " restrict"; + } + + /** + * A query used to find all sequences + */ + public String getQuerySequencesString() { + return "select seq_name from iisequence"; + } + + /** + * The name of the SQL function that transforms a string to + * lowercase + * + * @return String + */ + public String getLowercaseFunction() { + return "lowercase"; + } + + /** + * Does this Dialect have some kind of LIMIT syntax? + */ + public boolean supportsLimit() { + return true; + } + + /** + * Does this dialect support an offset? + */ + public boolean supportsLimitOffset() { + return false; + } + + /** + * Add a LIMIT clause to the given SQL SELECT + * + * @return the modified SQL + */ + public String getLimitString(String querySelect, int offset, int limit) { + if ( offset > 0 ) { + throw new UnsupportedOperationException( "offset not supported" ); + } + return new StringBuffer( querySelect.length() + 16 ) + .append( querySelect ) + .insert( 6, " first " + limit ) + .toString(); + } + + public boolean supportsVariableLimit() { + return false; + } + + /** + * Does the LIMIT clause take a "maximum" row number instead + * of a total number of returned rows? + */ + public boolean useMaxForLimit() { + return true; + } + + /** + * Ingres explicitly needs "unique not null", because "with null" is default + */ + public boolean supportsNotNullUnique() { + return false; + } + + /** + * Does this dialect support temporary tables? + */ + public boolean supportsTemporaryTables() { + return true; + } + + public String getCreateTemporaryTableString() { + return "declare global temporary table"; + } + + public String getCreateTemporaryTablePostfix() { + return "on commit preserve rows with norecovery"; + } + + public String generateTemporaryTableName(String baseTableName) { + return "session."
+ super.generateTemporaryTableName( baseTableName ); + } + + + /** + * Expression for current_timestamp + */ + public String getCurrentTimestampSQLFunctionName() { + return "date(now)"; + } + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsSubselectAsInPredicateLHS() { + return false; + } +} diff --git a/src/org/hibernate/dialect/InterbaseDialect.java b/src/org/hibernate/dialect/InterbaseDialect.java new file mode 100644 index 0000000000..0147a31be7 --- /dev/null +++ b/src/org/hibernate/dialect/InterbaseDialect.java @@ -0,0 +1,105 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.VarArgsSQLFunction; + +/** + * An SQL dialect for Interbase. + * @author Gavin King + */ +public class InterbaseDialect extends Dialect { + + public InterbaseDialect() { + super(); + registerColumnType( Types.BIT, "smallint" ); + registerColumnType( Types.BIGINT, "numeric(18,0)" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "smallint" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double precision" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, "blob" ); + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + registerColumnType( Types.BLOB, "blob" ); + registerColumnType( Types.CLOB, "blob sub_type 1" ); + + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(","||",")" ) ); + + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, NO_BATCH); + } + + public String getAddColumnString() { + return "add"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select " + getSelectSequenceNextValString( sequenceName ) + " from RDB$DATABASE"; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return "gen_id( " + sequenceName + ", 1 )"; + } + + public String getCreateSequenceString(String sequenceName) { + return "create generator " + sequenceName; + } + + public String getDropSequenceString(String sequenceName) { + return "delete from RDB$GENERATORS where RDB$GENERATOR_NAME = '" + sequenceName.toUpperCase() + "'"; + } + + public String getQuerySequencesString() { + return "select RDB$GENERATOR_NAME from RDB$GENERATORS"; + } + + public String getForUpdateString() { + return " with lock"; + } + public String getForUpdateString(String aliases) { + return " for update of " + aliases + " with lock"; + } + + public boolean supportsSequences() { + return true; + } + + public boolean supportsLimit() { + return true; + } + + public String getLimitString(String sql, boolean hasOffset) { + return new StringBuffer( sql.length()+15 ) + .append(sql) + .append(hasOffset ? " rows ? to ?" : " rows ?") + .toString(); + } + + public boolean bindLimitParametersFirst() { + return false; + } + + public boolean bindLimitParametersInReverseOrder() { + return false; + } + + public String getCurrentTimestampCallString() { + // TODO : not sure which (either?) is correct, could not find docs on how to do this. 
+ // did find various blogs and forums mentioning that select CURRENT_TIMESTAMP + // does not work... + return "{?= call CURRENT_TIMESTAMP }"; +// return "select CURRENT_TIMESTAMP from RDB$DATABASE"; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return true; + } +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/JDataStoreDialect.java b/src/org/hibernate/dialect/JDataStoreDialect.java new file mode 100755 index 0000000000..5d307e9987 --- /dev/null +++ b/src/org/hibernate/dialect/JDataStoreDialect.java @@ -0,0 +1,78 @@ +// $Id$ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.cfg.Environment; + +/** + * A Dialect for JDataStore. + * + * @author Vishy Kasar + */ +public class JDataStoreDialect extends Dialect { + + /** + * Creates new JDataStoreDialect + */ + public JDataStoreDialect() { + super(); + + registerColumnType( Types.BIT, "tinyint" ); + registerColumnType( Types.BIGINT, "bigint" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "tinyint" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, "varbinary($l)" ); + registerColumnType( Types.NUMERIC, "numeric($p, $s)" ); + + registerColumnType( Types.BLOB, "varbinary" ); + registerColumnType( Types.CLOB, "varchar" ); + + getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); + } + + public String getAddColumnString() { + return "add"; + } + + public boolean dropConstraints() { + return false; + } + + public String getCascadeConstraintsString() { + return " cascade"; + } + + public boolean supportsIdentityColumns() { + return true; + } + + public String getIdentitySelectString() { + return null; // NOT_SUPPORTED_SHOULD_USE_JDBC3_PreparedStatement.getGeneratedKeys_method + } + + public String getIdentityColumnString() { + return "autoincrement"; + } + + public String getNoColumnsInsertString() { + return "default values"; + } + + public boolean supportsColumnCheck() { + return false; + } + + public boolean supportsTableCheck() { + return false; + } + +} diff --git a/src/org/hibernate/dialect/MckoiDialect.java b/src/org/hibernate/dialect/MckoiDialect.java new file mode 100644 index 0000000000..3fee148329 --- /dev/null +++ b/src/org/hibernate/dialect/MckoiDialect.java @@ -0,0 +1,98 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.LockMode; +import org.hibernate.persister.entity.Lockable; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.lock.LockingStrategy; +import org.hibernate.dialect.lock.UpdateLockingStrategy; +import org.hibernate.dialect.lock.SelectLockingStrategy; +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.MckoiCaseFragment; + +/** + * An SQL dialect compatible with McKoi SQL + * @author Doug Currie, Gabe Hicks + */ +public class MckoiDialect extends Dialect { + public MckoiDialect() { + super(); + registerColumnType( Types.BIT, "bit" ); + registerColumnType( Types.BIGINT, "bigint" ); + registerColumnType( Types.SMALLINT, "smallint" ); + 
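+ // Illustrative aside: registerColumnType drives schema generation, and the
+ // $l placeholder is filled in with the mapped column length. As a minimal
+ // sketch (the property mapping is hypothetical), a mapping such as
+ //
+ //   <property name="name" length="500"/>
+ //
+ // combined with the "varchar($l)" template registered below would emit
+ // "varchar(500)" in the generated DDL.
+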
registerColumnType( Types.TINYINT, "tinyint" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, "varbinary" ); + registerColumnType( Types.NUMERIC, "numeric" ); + registerColumnType( Types.BLOB, "blob" ); + registerColumnType( Types.CLOB, "clob" ); + + registerFunction( "upper", new StandardSQLFunction("upper") ); + registerFunction( "lower", new StandardSQLFunction("lower") ); + registerFunction( "sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction( "abs", new StandardSQLFunction("abs") ); + registerFunction( "sign", new StandardSQLFunction( "sign", Hibernate.INTEGER ) ); + registerFunction( "length", new StandardSQLFunction( "length", Hibernate.INTEGER ) ); + registerFunction( "round", new StandardSQLFunction( "round", Hibernate.INTEGER ) ); + registerFunction( "mod", new StandardSQLFunction( "mod", Hibernate.INTEGER ) ); + registerFunction( "least", new StandardSQLFunction("least") ); + registerFunction( "greatest", new StandardSQLFunction("greatest") ); + registerFunction( "user", new StandardSQLFunction( "user", Hibernate.STRING ) ); + registerFunction( "concat", new StandardSQLFunction( "concat", Hibernate.STRING ) ); + + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, NO_BATCH); + } + + public String getAddColumnString() { + return "add column"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select " + getSelectSequenceNextValString( sequenceName ); + } + + public String getSelectSequenceNextValString(String sequenceName) { + return "nextval('" + sequenceName + "')"; + } + + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; + } + + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName; + } + + public String getForUpdateString() { + return ""; + } + + public boolean supportsSequences() { + return true; + } + + public CaseFragment createCaseFragment() { + return new MckoiCaseFragment(); + } + + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + // Mckoi has no known variation of a "SELECT ... FOR UPDATE" syntax... + if ( lockMode.greaterThan( LockMode.READ ) ) { + return new UpdateLockingStrategy( lockable, lockMode ); + } + else { + return new SelectLockingStrategy( lockable, lockMode ); + } + } +} diff --git a/src/org/hibernate/dialect/MimerSQLDialect.java b/src/org/hibernate/dialect/MimerSQLDialect.java new file mode 100755 index 0000000000..ec44c02542 --- /dev/null +++ b/src/org/hibernate/dialect/MimerSQLDialect.java @@ -0,0 +1,221 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.*; + +/** + * An Hibernate 3 SQL dialect for Mimer SQL. This dialect requires Mimer SQL 9.2.1 or later + * because of the mappings to NCLOB, BINARY, and BINARY VARYING. 
+ * @author Fredrik Ålund + */ +public class MimerSQLDialect extends Dialect { + + private static final int NATIONAL_CHAR_LENGTH = 2000; + private static final int BINARY_MAX_LENGTH = 2000; + + /** + * Even though Mimer SQL supports character and binary columns up to 15 000 in length, + * this is also the maximum width of the table (excluding LOBs). To avoid breaking the limit all the + * time we limit the length of the character columns to NATIONAL_CHAR_LENGTH for national + * characters, and BINARY_MAX_LENGTH for binary types. + * + */ + public MimerSQLDialect() { + super(); + registerColumnType( Types.BIT, "ODBC.BIT" ); + registerColumnType( Types.BIGINT, "BIGINT" ); + registerColumnType( Types.SMALLINT, "SMALLINT" ); + registerColumnType( Types.TINYINT, "ODBC.TINYINT" ); + registerColumnType( Types.INTEGER, "INTEGER" ); + registerColumnType( Types.CHAR, "NCHAR(1)" ); + registerColumnType( Types.VARCHAR, NATIONAL_CHAR_LENGTH, "NATIONAL CHARACTER VARYING($l)" ); + registerColumnType( Types.VARCHAR, "NCLOB($l)" ); + registerColumnType( Types.LONGVARCHAR, "CLOB($l)"); + registerColumnType( Types.FLOAT, "FLOAT" ); + registerColumnType( Types.DOUBLE, "DOUBLE PRECISION" ); + registerColumnType( Types.DATE, "DATE" ); + registerColumnType( Types.TIME, "TIME" ); + registerColumnType( Types.TIMESTAMP, "TIMESTAMP" ); + registerColumnType( Types.VARBINARY, BINARY_MAX_LENGTH, "BINARY VARYING($l)" ); + registerColumnType( Types.VARBINARY, "BLOB($l)" ); + registerColumnType( Types.LONGVARBINARY, "BLOB($l)"); + registerColumnType( Types.BINARY, BINARY_MAX_LENGTH, "BINARY" ); + registerColumnType( Types.BINARY, "BLOB($l)" ); + registerColumnType( Types.NUMERIC, "NUMERIC(19, $l)" ); + registerColumnType( Types.BLOB, "BLOB($l)" ); + registerColumnType( Types.CLOB, "NCLOB($l)" ); + + registerFunction("abs", new StandardSQLFunction("abs") ); + registerFunction("sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + registerFunction("ceiling", new StandardSQLFunction("ceiling") ); + registerFunction("floor", new StandardSQLFunction("floor") ); + registerFunction("round", new StandardSQLFunction("round") ); + + registerFunction("dacos", new StandardSQLFunction("dacos", Hibernate.DOUBLE) ); + registerFunction("acos", new StandardSQLFunction("dacos", Hibernate.DOUBLE) ); + registerFunction("dasin", new StandardSQLFunction("dasin", Hibernate.DOUBLE) ); + registerFunction("asin", new StandardSQLFunction("dasin", Hibernate.DOUBLE) ); + registerFunction("datan", new StandardSQLFunction("datan", Hibernate.DOUBLE) ); + registerFunction("atan", new StandardSQLFunction("datan", Hibernate.DOUBLE) ); + registerFunction("datan2", new StandardSQLFunction("datan2", Hibernate.DOUBLE) ); + registerFunction("atan2", new StandardSQLFunction("datan2", Hibernate.DOUBLE) ); + registerFunction("dcos", new StandardSQLFunction("dcos", Hibernate.DOUBLE) ); + registerFunction("cos", new StandardSQLFunction("dcos", Hibernate.DOUBLE) ); + registerFunction("dcot", new StandardSQLFunction("dcot", Hibernate.DOUBLE) ); + registerFunction("cot", new StandardSQLFunction("dcot", Hibernate.DOUBLE) ); + registerFunction("ddegrees", new StandardSQLFunction("ddegrees", Hibernate.DOUBLE) ); + registerFunction("degrees", new StandardSQLFunction("ddegrees", Hibernate.DOUBLE) ); + registerFunction("dexp", new StandardSQLFunction("dexp", Hibernate.DOUBLE) ); + registerFunction("exp", new StandardSQLFunction("dexp", Hibernate.DOUBLE) ); + registerFunction("dlog", new StandardSQLFunction("dlog", Hibernate.DOUBLE) );
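+
+ // Illustrative aside: the pattern here registers Mimer SQL's "d"-prefixed
+ // functions under both the native name and the conventional one ("dlog"
+ // just above and "log" just below both render as dlog). A minimal sketch,
+ // with "Order" as a hypothetical mapped entity:
+ //
+ //   List totals = session.createQuery( "select log(o.total) from Order o" ).list();
+ //
+ // would emit dlog(o.total) in the generated SQL.
+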
registerFunction("log", new StandardSQLFunction("dlog", Hibernate.DOUBLE) ); + registerFunction("dlog10", new StandardSQLFunction("dlog10", Hibernate.DOUBLE) ); + registerFunction("log10", new StandardSQLFunction("dlog10", Hibernate.DOUBLE) ); + registerFunction("dradian", new StandardSQLFunction("dradian", Hibernate.DOUBLE) ); + registerFunction("radian", new StandardSQLFunction("dradian", Hibernate.DOUBLE) ); + registerFunction("dsin", new StandardSQLFunction("dsin", Hibernate.DOUBLE) ); + registerFunction("sin", new StandardSQLFunction("dsin", Hibernate.DOUBLE) ); + registerFunction("soundex", new StandardSQLFunction("soundex", Hibernate.STRING) ); + registerFunction("dsqrt", new StandardSQLFunction("dsqrt", Hibernate.DOUBLE) ); + registerFunction("sqrt", new StandardSQLFunction("dsqrt", Hibernate.DOUBLE) ); + registerFunction("dtan", new StandardSQLFunction("dtan", Hibernate.DOUBLE) ); + registerFunction("tan", new StandardSQLFunction("dtan", Hibernate.DOUBLE) ); + registerFunction("dpower", new StandardSQLFunction("dpower") ); + registerFunction("power", new StandardSQLFunction("dpower") ); + + registerFunction("date", new StandardSQLFunction("date", Hibernate.DATE) ); + registerFunction("dayofweek", new StandardSQLFunction("dayofweek", Hibernate.INTEGER) ); + registerFunction("dayofyear", new StandardSQLFunction("dayofyear", Hibernate.INTEGER) ); + registerFunction("time", new StandardSQLFunction("time", Hibernate.TIME) ); + registerFunction("timestamp", new StandardSQLFunction("timestamp", Hibernate.TIMESTAMP) ); + registerFunction("week", new StandardSQLFunction("week", Hibernate.INTEGER) ); + + + registerFunction("varchar", new StandardSQLFunction("varchar", Hibernate.STRING) ); + registerFunction("real", new StandardSQLFunction("real", Hibernate.FLOAT) ); + registerFunction("bigint", new StandardSQLFunction("bigint", Hibernate.LONG) ); + registerFunction("char", new StandardSQLFunction("char", Hibernate.CHARACTER) ); + registerFunction("integer", new StandardSQLFunction("integer", Hibernate.INTEGER) ); + registerFunction("smallint", new StandardSQLFunction("smallint", Hibernate.SHORT) ); + + registerFunction("ascii_char", new StandardSQLFunction("ascii_char", Hibernate.CHARACTER) ); + registerFunction("ascii_code", new StandardSQLFunction("ascii_code", Hibernate.STRING)); + registerFunction("unicode_char", new StandardSQLFunction("unicode_char", Hibernate.LONG)); + registerFunction("unicode_code", new StandardSQLFunction("unicode_code", Hibernate.STRING)); + registerFunction("upper", new StandardSQLFunction("upper") ); + registerFunction("lower", new StandardSQLFunction("lower") ); + registerFunction("char_length", new StandardSQLFunction("char_length", Hibernate.LONG) ); + registerFunction("bit_length", new StandardSQLFunction("bit_length", Hibernate.STRING)); + + getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true"); + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, "50"); + } + + /** + * The syntax used to add a column to a table + */ + public String getAddColumnString() { + return "add column"; + } + + /** + * We do not have to drop constraints before we drop the table + */ + public boolean dropConstraints() { + return false; + } + + /** + * TODO: Check if Mimer SQL cannot handle the way DB2 does + */ + public boolean supportsIdentityColumns() { + return false; + } + + /** + * Mimer SQL supports sequences + * @return boolean + */ + public boolean supportsSequences() { + return true; + } + + /** + * The syntax used to get the next 
value of a sequence in Mimer SQL + */ + public String getSequenceNextValString(String sequenceName) { + return "select next_value of " + sequenceName + " from system.onerow"; + } + + /** + * The syntax used to create a sequence. Since we presume the sequences will be used as keys, + * we make them unique. + */ + public String getCreateSequenceString(String sequenceName) { + return "create unique sequence " + sequenceName; + } + + /** + * The syntax used to drop sequences + */ + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName + " restrict"; + } + + /** + * Mimer SQL does not support limit + */ + public boolean supportsLimit() { + return false; + } + + /** + * The syntax for using cascade on constraints + */ + public String getCascadeConstraintsString() { + return " cascade"; + } + + /** + * The syntax for fetching all sequences available in the current schema. + */ + public String getQuerySequencesString() { + return "select sequence_schema || '.' || sequence_name from information_schema.ext_sequences"; + } + + /** + * Does the FOR UPDATE OF syntax specify particular + * columns? + */ + public boolean forUpdateOfColumns() { + return false; + } + + /** + * Support the FOR UPDATE syntax? For now, returns false since + * the current version of the Mimer SQL JDBC Driver does not support + * updatable resultsets. Otherwise, Mimer SQL actually supports the for update syntax. + * @return boolean + */ + public boolean supportsForUpdate() { + return false; + } + + + /** + * For now, simply return false since we don't support updatable result sets. + */ + public boolean supportsOuterJoinForUpdate() { + return false; + } +} + + + + + + diff --git a/src/org/hibernate/dialect/MySQL5Dialect.java b/src/org/hibernate/dialect/MySQL5Dialect.java new file mode 100644 index 0000000000..4425845f11 --- /dev/null +++ b/src/org/hibernate/dialect/MySQL5Dialect.java @@ -0,0 +1,16 @@ +package org.hibernate.dialect; + +import java.sql.Types; + +/** + * An SQL dialect for MySQL 5.x specific features.
+ * + * @author Steve Ebersole + */ +public class MySQL5Dialect extends MySQLDialect { + protected void registerVarcharTypes() { + registerColumnType( Types.VARCHAR, "longtext" ); + registerColumnType( Types.VARCHAR, 16777215, "mediumtext" ); + registerColumnType( Types.VARCHAR, 65535, "varchar($l)" ); + } +} diff --git a/src/org/hibernate/dialect/MySQL5InnoDBDialect.java b/src/org/hibernate/dialect/MySQL5InnoDBDialect.java new file mode 100755 index 0000000000..dc9c870471 --- /dev/null +++ b/src/org/hibernate/dialect/MySQL5InnoDBDialect.java @@ -0,0 +1,21 @@ +//$Id: $ +package org.hibernate.dialect; + +/** + * @author Gavin King, Scott Marlow + */ +public class MySQL5InnoDBDialect extends MySQL5Dialect { + + public boolean supportsCascadeDelete() { + return true; + } + + public String getTableTypeString() { + return " ENGINE=InnoDB"; + } + + public boolean hasSelfReferentialForeignKeyBug() { + return true; + } + +} diff --git a/src/org/hibernate/dialect/MySQLDialect.java b/src/org/hibernate/dialect/MySQLDialect.java new file mode 100644 index 0000000000..6f9006333e --- /dev/null +++ b/src/org/hibernate/dialect/MySQLDialect.java @@ -0,0 +1,350 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.CallableStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.util.StringHelper; + +/** + * An SQL dialect for MySQL (prior to 5.x). + * + * @author Gavin King + */ +public class MySQLDialect extends Dialect { + + public MySQLDialect() { + super(); + registerColumnType( Types.BIT, "bit" ); + registerColumnType( Types.BIGINT, "bigint" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "tinyint" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double precision" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "datetime" ); + registerColumnType( Types.VARBINARY, "longblob" ); + registerColumnType( Types.VARBINARY, 16777215, "mediumblob" ); + registerColumnType( Types.VARBINARY, 65535, "blob" ); + registerColumnType( Types.VARBINARY, 255, "tinyblob" ); + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + registerColumnType( Types.BLOB, "longblob" ); + registerColumnType( Types.BLOB, 16777215, "mediumblob" ); + registerColumnType( Types.BLOB, 65535, "blob" ); + registerColumnType( Types.CLOB, "longtext" ); + registerColumnType( Types.CLOB, 16777215, "mediumtext" ); + registerColumnType( Types.CLOB, 65535, "text" ); + registerVarcharTypes(); + + registerFunction("ascii", new StandardSQLFunction("ascii", Hibernate.INTEGER) ); + registerFunction("bin", new StandardSQLFunction("bin", Hibernate.STRING) ); + registerFunction("char_length", new StandardSQLFunction("char_length", Hibernate.LONG) ); + registerFunction("character_length", new StandardSQLFunction("character_length", Hibernate.LONG) ); + registerFunction("lcase", new StandardSQLFunction("lcase") ); + registerFunction("lower", new StandardSQLFunction("lower") ); + registerFunction("length", new StandardSQLFunction("length", Hibernate.LONG) ); + registerFunction("ltrim", new StandardSQLFunction("ltrim") ); + registerFunction("ord", new 
StandardSQLFunction("ord", Hibernate.INTEGER) ); + registerFunction("quote", new StandardSQLFunction("quote") ); + registerFunction("reverse", new StandardSQLFunction("reverse") ); + registerFunction("rtrim", new StandardSQLFunction("rtrim") ); + registerFunction("soundex", new StandardSQLFunction("soundex") ); + registerFunction("space", new StandardSQLFunction("space", Hibernate.STRING) ); + registerFunction("ucase", new StandardSQLFunction("ucase") ); + registerFunction("upper", new StandardSQLFunction("upper") ); + registerFunction("unhex", new StandardSQLFunction("unhex", Hibernate.STRING) ); + + registerFunction("abs", new StandardSQLFunction("abs") ); + registerFunction("sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + + registerFunction("acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) ); + registerFunction("asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) ); + registerFunction("atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) ); + registerFunction("cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) ); + registerFunction("cot", new StandardSQLFunction("cot", Hibernate.DOUBLE) ); + registerFunction("crc32", new StandardSQLFunction("crc32", Hibernate.LONG) ); + registerFunction("exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) ); + registerFunction("ln", new StandardSQLFunction("ln", Hibernate.DOUBLE) ); + registerFunction("log", new StandardSQLFunction("log", Hibernate.DOUBLE) ); + registerFunction("log2", new StandardSQLFunction("log2", Hibernate.DOUBLE) ); + registerFunction("log10", new StandardSQLFunction("log10", Hibernate.DOUBLE) ); + registerFunction("pi", new NoArgSQLFunction("pi", Hibernate.DOUBLE) ); + registerFunction("rand", new NoArgSQLFunction("rand", Hibernate.DOUBLE) ); + registerFunction("sin", new StandardSQLFunction("sin", Hibernate.DOUBLE) ); + registerFunction("sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction("tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) ); + + registerFunction("radians", new StandardSQLFunction("radians", Hibernate.DOUBLE) ); + registerFunction("degrees", new StandardSQLFunction("degrees", Hibernate.DOUBLE) ); + + registerFunction("ceiling", new StandardSQLFunction("ceiling", Hibernate.INTEGER) ); + registerFunction("ceil", new StandardSQLFunction("ceil", Hibernate.INTEGER) ); + registerFunction("floor", new StandardSQLFunction("floor", Hibernate.INTEGER) ); + registerFunction("round", new StandardSQLFunction("round", Hibernate.INTEGER) ); + + registerFunction("datediff", new StandardSQLFunction("datediff", Hibernate.INTEGER) ); + registerFunction("timediff", new StandardSQLFunction("timediff", Hibernate.TIME) ); + registerFunction("date_format", new StandardSQLFunction("date_format", Hibernate.STRING) ); + + registerFunction("curdate", new NoArgSQLFunction("curdate", Hibernate.DATE) ); + registerFunction("curtime", new NoArgSQLFunction("curtime", Hibernate.TIME) ); + registerFunction("current_date", new NoArgSQLFunction("current_date", Hibernate.DATE, false) ); + registerFunction("current_time", new NoArgSQLFunction("current_time", Hibernate.TIME, false) ); + registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", Hibernate.TIMESTAMP, false) ); + registerFunction("date", new StandardSQLFunction("date", Hibernate.DATE) ); + registerFunction("day", new StandardSQLFunction("day", Hibernate.INTEGER) ); + registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth", Hibernate.INTEGER) ); + registerFunction("dayname", new 
StandardSQLFunction("dayname", Hibernate.STRING) ); + registerFunction("dayofweek", new StandardSQLFunction("dayofweek", Hibernate.INTEGER) ); + registerFunction("dayofyear", new StandardSQLFunction("dayofyear", Hibernate.INTEGER) ); + registerFunction("from_days", new StandardSQLFunction("from_days", Hibernate.DATE) ); + registerFunction("from_unixtime", new StandardSQLFunction("from_unixtime", Hibernate.TIMESTAMP) ); + registerFunction("hour", new StandardSQLFunction("hour", Hibernate.INTEGER) ); + registerFunction("last_day", new StandardSQLFunction("last_day", Hibernate.DATE) ); + registerFunction("localtime", new NoArgSQLFunction("localtime", Hibernate.TIMESTAMP) ); + registerFunction("localtimestamp", new NoArgSQLFunction("localtimestamp", Hibernate.TIMESTAMP) ); + registerFunction("microseconds", new StandardSQLFunction("microseconds", Hibernate.INTEGER) ); + registerFunction("minute", new StandardSQLFunction("minute", Hibernate.INTEGER) ); + registerFunction("month", new StandardSQLFunction("month", Hibernate.INTEGER) ); + registerFunction("monthname", new StandardSQLFunction("monthname", Hibernate.STRING) ); + registerFunction("now", new NoArgSQLFunction("now", Hibernate.TIMESTAMP) ); + registerFunction("quarter", new StandardSQLFunction("quarter", Hibernate.INTEGER) ); + registerFunction("second", new StandardSQLFunction("second", Hibernate.INTEGER) ); + registerFunction("sec_to_time", new StandardSQLFunction("sec_to_time", Hibernate.TIME) ); + registerFunction("sysdate", new NoArgSQLFunction("sysdate", Hibernate.TIMESTAMP) ); + registerFunction("time", new StandardSQLFunction("time", Hibernate.TIME) ); + registerFunction("timestamp", new StandardSQLFunction("timestamp", Hibernate.TIMESTAMP) ); + registerFunction("time_to_sec", new StandardSQLFunction("time_to_sec", Hibernate.INTEGER) ); + registerFunction("to_days", new StandardSQLFunction("to_days", Hibernate.LONG) ); + registerFunction("unix_timestamp", new StandardSQLFunction("unix_timestamp", Hibernate.LONG) ); + registerFunction("utc_date", new NoArgSQLFunction("utc_date", Hibernate.STRING) ); + registerFunction("utc_time", new NoArgSQLFunction("utc_time", Hibernate.STRING) ); + registerFunction("utc_timestamp", new NoArgSQLFunction("utc_timestamp", Hibernate.STRING) ); + registerFunction("week", new StandardSQLFunction("week", Hibernate.INTEGER) ); + registerFunction("weekday", new StandardSQLFunction("weekday", Hibernate.INTEGER) ); + registerFunction("weekofyear", new StandardSQLFunction("weekofyear", Hibernate.INTEGER) ); + registerFunction("year", new StandardSQLFunction("year", Hibernate.INTEGER) ); + registerFunction("yearweek", new StandardSQLFunction("yearweek", Hibernate.INTEGER) ); + + registerFunction("hex", new StandardSQLFunction("hex", Hibernate.STRING) ); + registerFunction("oct", new StandardSQLFunction("oct", Hibernate.STRING) ); + + registerFunction("octet_length", new StandardSQLFunction("octet_length", Hibernate.LONG) ); + registerFunction("bit_length", new StandardSQLFunction("bit_length", Hibernate.LONG) ); + + registerFunction("bit_count", new StandardSQLFunction("bit_count", Hibernate.LONG) ); + registerFunction("encrypt", new StandardSQLFunction("encrypt", Hibernate.STRING) ); + registerFunction("md5", new StandardSQLFunction("md5", Hibernate.STRING) ); + registerFunction("sha1", new StandardSQLFunction("sha1", Hibernate.STRING) ); + registerFunction("sha", new StandardSQLFunction("sha", Hibernate.STRING) ); + + registerFunction( "concat", new StandardSQLFunction( "concat", Hibernate.STRING ) ); + 
+ getDefaultProperties().setProperty(Environment.MAX_FETCH_DEPTH, "2"); + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE); + } + + protected void registerVarcharTypes() { + registerColumnType( Types.VARCHAR, "longtext" ); + registerColumnType( Types.VARCHAR, 16777215, "mediumtext" ); + registerColumnType( Types.VARCHAR, 65535, "text" ); + registerColumnType( Types.VARCHAR, 255, "varchar($l)" ); + } + + public String getAddColumnString() { + return "add column"; + } + + public boolean qualifyIndexName() { + return false; + } + + public boolean supportsIdentityColumns() { + return true; + } + + public String getIdentitySelectString() { + return "select last_insert_id()"; + } + + public String getIdentityColumnString() { + return "not null auto_increment"; //starts with 1, implicitly + } + + public String getAddForeignKeyConstraintString( + String constraintName, + String[] foreignKey, + String referencedTable, + String[] primaryKey, boolean referencesPrimaryKey + ) { + String cols = StringHelper.join(", ", foreignKey); + return new StringBuffer(30) + .append(" add index ") + .append(constraintName) + .append(" (") + .append(cols) + .append("), add constraint ") + .append(constraintName) + .append(" foreign key (") + .append(cols) + .append(") references ") + .append(referencedTable) + .append(" (") + .append( StringHelper.join(", ", primaryKey) ) + .append(')') + .toString(); + } + + public boolean supportsLimit() { + return true; + } + + public String getDropForeignKeyString() { + return " drop foreign key "; + } + + public String getLimitString(String sql, boolean hasOffset) { + return new StringBuffer( sql.length()+20 ) + .append(sql) + .append( hasOffset ? " limit ?, ?" : " limit ?") + .toString(); + } + + /* + * Temporary, until MySQL fix Connector/J bug + */ + /*public String getLimitString(String sql, int offset, int limit) { + StringBuffer buf = new StringBuffer( sql.length()+20 ) + .append(sql); + if (offset>0) { + buf.append(" limit ") + .append(offset) + .append(", ") + .append(limit); + } + else { + buf.append(" limit ") + .append(limit); + } + return buf.toString(); + }*/ + + /* + * Temporary, until MySQL fix Connector/J bug + */ + /*public boolean supportsVariableLimit() { + return false; + }*/ + + public char closeQuote() { + return '`'; + } + + public char openQuote() { + return '`'; + } + + public boolean supportsIfExistsBeforeTableName() { + return true; + } + + public String getSelectGUIDString() { + return "select uuid()"; + } + + public boolean supportsCascadeDelete() { + return false; + } + + public String getTableComment(String comment) { + return " comment='" + comment + "'"; + } + + public String getColumnComment(String comment) { + return " comment '" + comment + "'"; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String getCreateTemporaryTableString() { + return "create temporary table if not exists"; + } + + public String getCastTypeName(int code) { + if ( code==Types.INTEGER ) { + return "signed"; + } + else if ( code==Types.VARCHAR ) { + return "char"; + } + else if ( code==Types.VARBINARY ) { + return "binary"; + } + else { + return super.getCastTypeName( code ); + } + } + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + public String getCurrentTimestampSelectString() { + return "select now()"; + } + + public int registerResultSetOutParameter(CallableStatement statement, int 
col) throws SQLException { + return col; + } + + public ResultSet getResultSet(CallableStatement ps) throws SQLException { + boolean isResultSet = ps.execute(); + while (!isResultSet && ps.getUpdateCount() != -1) { + isResultSet = ps.getMoreResults(); + } + return ps.getResultSet(); + } + + public boolean supportsRowValueConstructorSyntax() { + return true; + } + + public Boolean performTemporaryTableDDLInIsolation() { + return Boolean.FALSE; + } + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } + + public boolean areStringComparisonsCaseInsensitive() { + return true; + } + + public boolean supportsLobValueChangePropogation() { + // note: at least my local MySQL 5.1 install shows this not working... + return false; + } + + public boolean supportsSubqueryOnMutatingTable() { + return false; + } +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/MySQLInnoDBDialect.java b/src/org/hibernate/dialect/MySQLInnoDBDialect.java new file mode 100755 index 0000000000..b4e874ce06 --- /dev/null +++ b/src/org/hibernate/dialect/MySQLInnoDBDialect.java @@ -0,0 +1,21 @@ +//$Id$ +package org.hibernate.dialect; + +/** + * @author Gavin King + */ +public class MySQLInnoDBDialect extends MySQLDialect { + + public boolean supportsCascadeDelete() { + return true; + } + + public String getTableTypeString() { + return " type=InnoDB"; + } + + public boolean hasSelfReferentialForeignKeyBug() { + return true; + } + +} diff --git a/src/org/hibernate/dialect/MySQLMyISAMDialect.java b/src/org/hibernate/dialect/MySQLMyISAMDialect.java new file mode 100755 index 0000000000..3ac0e82b9d --- /dev/null +++ b/src/org/hibernate/dialect/MySQLMyISAMDialect.java @@ -0,0 +1,17 @@ +//$Id$ +package org.hibernate.dialect; + +/** + * @author Gavin King + */ +public class MySQLMyISAMDialect extends MySQLDialect { + + public String getTableTypeString() { + return " type=MyISAM"; + } + + public boolean dropConstraints() { + return false; + } + +} diff --git a/src/org/hibernate/dialect/Oracle10gDialect.java b/src/org/hibernate/dialect/Oracle10gDialect.java new file mode 100644 index 0000000000..66d5699767 --- /dev/null +++ b/src/org/hibernate/dialect/Oracle10gDialect.java @@ -0,0 +1,26 @@ +//$Id$ +package org.hibernate.dialect; + +import org.hibernate.sql.JoinFragment; +import org.hibernate.sql.ANSIJoinFragment; + + +/** + * A dialect specifically for use with Oracle 10g. + *
<p/>
    + * The main difference between this dialect and {@link Oracle9iDialect} + * is the use of "ANSI join syntax" here... + * + * @author Steve Ebersole + */ +public class Oracle10gDialect extends Oracle9iDialect { + + public Oracle10gDialect() { + super(); + } + + public JoinFragment createOuterJoinFragment() { + return new ANSIJoinFragment(); + } + +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/Oracle8iDialect.java b/src/org/hibernate/dialect/Oracle8iDialect.java new file mode 100644 index 0000000000..743f7d8484 --- /dev/null +++ b/src/org/hibernate/dialect/Oracle8iDialect.java @@ -0,0 +1,431 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.Types; +import java.sql.SQLException; +import java.sql.ResultSet; +import java.sql.CallableStatement; + +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.DecodeCaseFragment; +import org.hibernate.sql.JoinFragment; +import org.hibernate.sql.OracleJoinFragment; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.function.NvlFunction; +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.util.ReflectHelper; +import org.hibernate.exception.ViolatedConstraintNameExtracter; +import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter; +import org.hibernate.exception.JDBCExceptionHelper; + +/** + * A dialect for Oracle 8i. + * + * @author Steve Ebersole + */ +public class Oracle8iDialect extends Dialect { + + public Oracle8iDialect() { + super(); + registerCharacterTypeMappings(); + registerNumericTypeMappings(); + registerDateTimeTypeMappings(); + registerLargeObjectTypeMappings(); + + registerReverseHibernateTypeMappings(); + + registerFunctions(); + + registerDefaultProperties(); + } + + protected void registerCharacterTypeMappings() { + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, 4000, "varchar2($l)" ); + registerColumnType( Types.VARCHAR, "long" ); + } + + protected void registerNumericTypeMappings() { + registerColumnType( Types.BIT, "number(1,0)" ); + registerColumnType( Types.BIGINT, "number(19,0)" ); + registerColumnType( Types.SMALLINT, "number(5,0)" ); + registerColumnType( Types.TINYINT, "number(3,0)" ); + registerColumnType( Types.INTEGER, "number(10,0)" ); + + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double precision" ); + registerColumnType( Types.NUMERIC, "number($p,$s)" ); + registerColumnType( Types.DECIMAL, "number($p,$s)" ); + } + + protected void registerDateTimeTypeMappings() { + registerColumnType( Types.TIMESTAMP, "date" ); + } + + protected void registerLargeObjectTypeMappings() { + registerColumnType( Types.VARBINARY, 2000, "raw($l)" ); + registerColumnType( Types.VARBINARY, "long raw" ); + + registerColumnType( Types.BLOB, "blob" ); + registerColumnType( Types.CLOB, "clob" ); + } + + protected void registerReverseHibernateTypeMappings() { + } + + protected void registerFunctions() { + registerFunction( "abs", new StandardSQLFunction("abs") ); + registerFunction( "sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + + registerFunction( "acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) ); + registerFunction( "asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) ); + 
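The Oracle8iDialect constructor above calls protected register*() hooks rather than registering everything inline, so a subclass can swap out one family of mappings at a time; Oracle9iDialect, later in this patch, overrides registerCharacterTypeMappings() and registerDateTimeTypeMappings() exactly this way. A minimal sketch of the pattern, with illustrative class names rather than the real dialects:

public class DialectHookSketch {

    static class Base {
        Base() {
            registerCharacterTypes(); // hook: runs the subclass override if present
        }
        protected void registerCharacterTypes() {
            System.out.println("VARCHAR -> varchar2($l)");      // 8i byte semantics
        }
    }

    static class Newer extends Base {
        protected void registerCharacterTypes() {
            System.out.println("VARCHAR -> varchar2($l char)"); // 9i char semantics
        }
    }

    public static void main(String[] argv) {
        new Newer(); // prints the 9i mapping: the base constructor dispatches virtually
    }
}

Calling an overridable method from a constructor is normally suspect in Java, but here it is precisely what lets each Oracle generation re-declare just the mappings that changed.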
registerFunction( "atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) ); + registerFunction( "cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) ); + registerFunction( "cosh", new StandardSQLFunction("cosh", Hibernate.DOUBLE) ); + registerFunction( "exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) ); + registerFunction( "ln", new StandardSQLFunction("ln", Hibernate.DOUBLE) ); + registerFunction( "sin", new StandardSQLFunction("sin", Hibernate.DOUBLE) ); + registerFunction( "sinh", new StandardSQLFunction("sinh", Hibernate.DOUBLE) ); + registerFunction( "stddev", new StandardSQLFunction("stddev", Hibernate.DOUBLE) ); + registerFunction( "sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction( "tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) ); + registerFunction( "tanh", new StandardSQLFunction("tanh", Hibernate.DOUBLE) ); + registerFunction( "variance", new StandardSQLFunction("variance", Hibernate.DOUBLE) ); + + registerFunction( "round", new StandardSQLFunction("round") ); + registerFunction( "trunc", new StandardSQLFunction("trunc") ); + registerFunction( "ceil", new StandardSQLFunction("ceil") ); + registerFunction( "floor", new StandardSQLFunction("floor") ); + + registerFunction( "chr", new StandardSQLFunction("chr", Hibernate.CHARACTER) ); + registerFunction( "initcap", new StandardSQLFunction("initcap") ); + registerFunction( "lower", new StandardSQLFunction("lower") ); + registerFunction( "ltrim", new StandardSQLFunction("ltrim") ); + registerFunction( "rtrim", new StandardSQLFunction("rtrim") ); + registerFunction( "soundex", new StandardSQLFunction("soundex") ); + registerFunction( "upper", new StandardSQLFunction("upper") ); + registerFunction( "ascii", new StandardSQLFunction("ascii", Hibernate.INTEGER) ); + registerFunction( "length", new StandardSQLFunction("length", Hibernate.LONG) ); + + registerFunction( "to_char", new StandardSQLFunction("to_char", Hibernate.STRING) ); + registerFunction( "to_date", new StandardSQLFunction("to_date", Hibernate.TIMESTAMP) ); + + registerFunction( "current_date", new NoArgSQLFunction("current_date", Hibernate.DATE, false) ); + registerFunction( "current_time", new NoArgSQLFunction("current_timestamp", Hibernate.TIME, false) ); + registerFunction( "current_timestamp", new NoArgSQLFunction("current_timestamp", Hibernate.TIMESTAMP, false) ); + + registerFunction( "lastday", new StandardSQLFunction("lastday", Hibernate.DATE) ); + registerFunction( "sysdate", new NoArgSQLFunction("sysdate", Hibernate.DATE, false) ); + registerFunction( "systimestamp", new NoArgSQLFunction("systimestamp", Hibernate.TIMESTAMP, false) ); + registerFunction( "uid", new NoArgSQLFunction("uid", Hibernate.INTEGER, false) ); + registerFunction( "user", new NoArgSQLFunction("user", Hibernate.STRING, false) ); + + registerFunction( "rowid", new NoArgSQLFunction("rowid", Hibernate.LONG, false) ); + registerFunction( "rownum", new NoArgSQLFunction("rownum", Hibernate.LONG, false) ); + + // Multi-param string dialect functions... 
+ registerFunction( "concat", new VarArgsSQLFunction(Hibernate.STRING, "", "||", "") ); + registerFunction( "instr", new StandardSQLFunction("instr", Hibernate.INTEGER) ); + registerFunction( "instrb", new StandardSQLFunction("instrb", Hibernate.INTEGER) ); + registerFunction( "lpad", new StandardSQLFunction("lpad", Hibernate.STRING) ); + registerFunction( "replace", new StandardSQLFunction("replace", Hibernate.STRING) ); + registerFunction( "rpad", new StandardSQLFunction("rpad", Hibernate.STRING) ); + registerFunction( "substr", new StandardSQLFunction("substr", Hibernate.STRING) ); + registerFunction( "substrb", new StandardSQLFunction("substrb", Hibernate.STRING) ); + registerFunction( "translate", new StandardSQLFunction("translate", Hibernate.STRING) ); + + registerFunction( "substring", new StandardSQLFunction( "substr", Hibernate.STRING ) ); + registerFunction( "locate", new SQLFunctionTemplate( Hibernate.INTEGER, "instr(?2,?1)" ) ); + registerFunction( "bit_length", new SQLFunctionTemplate( Hibernate.INTEGER, "vsize(?1)*8" ) ); + registerFunction( "coalesce", new NvlFunction() ); + + // Multi-param numeric dialect functions... + registerFunction( "atan2", new StandardSQLFunction("atan2", Hibernate.FLOAT) ); + registerFunction( "log", new StandardSQLFunction("log", Hibernate.INTEGER) ); + registerFunction( "mod", new StandardSQLFunction("mod", Hibernate.INTEGER) ); + registerFunction( "nvl", new StandardSQLFunction("nvl") ); + registerFunction( "nvl2", new StandardSQLFunction("nvl2") ); + registerFunction( "power", new StandardSQLFunction("power", Hibernate.FLOAT) ); + + // Multi-param date dialect functions... + registerFunction( "add_months", new StandardSQLFunction("add_months", Hibernate.DATE) ); + registerFunction( "months_between", new StandardSQLFunction("months_between", Hibernate.FLOAT) ); + registerFunction( "next_day", new StandardSQLFunction("next_day", Hibernate.DATE) ); + + registerFunction( "str", new StandardSQLFunction("to_char", Hibernate.STRING) ); + } + + protected void registerDefaultProperties() { + getDefaultProperties().setProperty( Environment.USE_STREAMS_FOR_BINARY, "true" ); + getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); + // Oracle driver reports to support getGeneratedKeys(), but they only + // support the version taking an array of the names of the columns to + // be returned (via its RETURNING clause). No other driver seems to + // support this overloaded version. + getDefaultProperties().setProperty( Environment.USE_GET_GENERATED_KEYS, "false" ); + } + + + // features which change between 8i, 9i, and 10g ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Support for the oracle proprietary join syntax... + * + * @return The orqacle join fragment + */ + public JoinFragment createOuterJoinFragment() { + return new OracleJoinFragment(); + } + + /** + * Map case support to the Oracle DECODE function. Oracle did not + * add support for CASE until 9i. 
+ * + * @return The oracle CASE -> DECODE fragment + */ + public CaseFragment createCaseFragment() { + return new DecodeCaseFragment(); + } + + public String getLimitString(String sql, boolean hasOffset) { + sql = sql.trim(); + boolean isForUpdate = false; + if ( sql.toLowerCase().endsWith(" for update") ) { + sql = sql.substring( 0, sql.length()-11 ); + isForUpdate = true; + } + + StringBuffer pagingSelect = new StringBuffer( sql.length()+100 ); + if (hasOffset) { + pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( "); + } + else { + pagingSelect.append("select * from ( "); + } + pagingSelect.append(sql); + if (hasOffset) { + pagingSelect.append(" ) row_ ) where rownum_ <= ? and rownum_ > ?"); + } + else { + pagingSelect.append(" ) where rownum <= ?"); + } + + if ( isForUpdate ) { + pagingSelect.append( " for update" ); + } + + return pagingSelect.toString(); + } + + /** + * Allows access to the basic {@link Dialect#getSelectClauseNullString} + * implementation... + * + * @param sqlType The {@link java.sql.Types} mapping type code + * @return The appropriate select cluse fragment + */ + public String getBasicSelectClauseNullString(int sqlType) { + return super.getSelectClauseNullString( sqlType ); + } + + public String getSelectClauseNullString(int sqlType) { + switch(sqlType) { + case Types.VARCHAR: + case Types.CHAR: + return "to_char(null)"; + case Types.DATE: + case Types.TIMESTAMP: + case Types.TIME: + return "to_date(null)"; + default: + return "to_number(null)"; + } + } + + public String getCurrentTimestampSelectString() { + return "select sysdate from dual"; + } + + public String getCurrentTimestampSQLFunctionName() { + return "sysdate"; + } + + + // features which remain constant across 8i, 9i, and 10g ~~~~~~~~~~~~~~~~~~ + + public String getAddColumnString() { + return "add"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select " + getSelectSequenceNextValString( sequenceName ) + " from dual"; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return sequenceName + ".nextval"; + } + + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; //starts with 1, implicitly + } + + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName; + } + + public String getCascadeConstraintsString() { + return " cascade constraints"; + } + + public boolean dropConstraints() { + return false; + } + + public String getForUpdateNowaitString() { + return " for update nowait"; + } + + public boolean supportsSequences() { + return true; + } + + public boolean supportsPooledSequences() { + return true; + } + + public boolean supportsLimit() { + return true; + } + + public String getForUpdateString(String aliases) { + return getForUpdateString() + " of " + aliases; + } + + public String getForUpdateNowaitString(String aliases) { + return getForUpdateString() + " of " + aliases + " nowait"; + } + + public boolean bindLimitParametersInReverseOrder() { + return true; + } + + public boolean useMaxForLimit() { + return true; + } + + public boolean forUpdateOfColumns() { + return true; + } + + public String getQuerySequencesString() { + return "select sequence_name from user_sequences"; + } + + public String getSelectGUIDString() { + return "select rawtohex(sys_guid()) from dual"; + } + + public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { + return EXTRACTER; + } + + private static 
ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() { + + /** + * Extract the name of the violated constraint from the given SQLException. + * + * @param sqle The exception that was the result of the constraint violation. + * @return The extracted constraint name. + */ + public String extractConstraintName(SQLException sqle) { + int errorCode = JDBCExceptionHelper.extractErrorCode(sqle); + if ( errorCode == 1 || errorCode == 2291 || errorCode == 2292 ) { + return extractUsingTemplate( "constraint (", ") violated", sqle.getMessage() ); + } + else if ( errorCode == 1400 ) { + // simple nullability constraint + return null; + } + else { + return null; + } + } + + }; + + // not final-static to avoid possible classcast exceptions if using different oracle drivers. + int oracletypes_cursor_value = 0; + public int registerResultSetOutParameter(java.sql.CallableStatement statement,int col) throws SQLException { + if(oracletypes_cursor_value==0) { + try { + Class types = ReflectHelper.classForName("oracle.jdbc.driver.OracleTypes"); + oracletypes_cursor_value = types.getField("CURSOR").getInt(types.newInstance()); + } catch (Exception se) { + throw new HibernateException("Problem while trying to load or access OracleTypes.CURSOR value",se); + } + } + // register the type of the out param - an Oracle specific type + statement.registerOutParameter(col, oracletypes_cursor_value); + col++; + return col; + } + + public ResultSet getResultSet(CallableStatement ps) throws SQLException { + ps.execute(); + return ( ResultSet ) ps.getObject( 1 ); + } + + public boolean supportsUnionAll() { + return true; + } + + public boolean supportsCommentOn() { + return true; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String generateTemporaryTableName(String baseTableName) { + String name = super.generateTemporaryTableName(baseTableName); + return name.length() > 30 ? 
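+ // Oracle limits identifiers to 30 characters, so the generated temporary-table
+ // name must be truncated to fit (note that this historical code truncates with
+ // substring(1, 30), which also drops the name's first character).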
name.substring( 1, 30 ) : name; + } + + public String getCreateTemporaryTableString() { + return "create global temporary table"; + } + + public String getCreateTemporaryTablePostfix() { + return "on commit delete rows"; + } + + public boolean dropTemporaryTableAfterUse() { + return false; + } + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } + + public boolean supportsExistsInSelect() { + return false; + } + +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/Oracle9Dialect.java b/src/org/hibernate/dialect/Oracle9Dialect.java new file mode 100644 index 0000000000..bce9892268 --- /dev/null +++ b/src/org/hibernate/dialect/Oracle9Dialect.java @@ -0,0 +1,351 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.CallableStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.NvlFunction; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter; +import org.hibernate.exception.ViolatedConstraintNameExtracter; +import org.hibernate.util.ReflectHelper; + +/** + * An SQL dialect for Oracle 9 (uses ANSI-style syntax where possible). + * + * @deprecated Use either Oracle9iDialect or Oracle10gDialect instead + * @author Gavin King, David Channon + */ +public class Oracle9Dialect extends Dialect { + + private static final Log log = LogFactory.getLog( Oracle9Dialect.class ); + + public Oracle9Dialect() { + super(); + log.warn( "The Oracle9Dialect dialect has been deprecated; use either Oracle9iDialect or Oracle10gDialect instead" ); + registerColumnType( Types.BIT, "number(1,0)" ); + registerColumnType( Types.BIGINT, "number(19,0)" ); + registerColumnType( Types.SMALLINT, "number(5,0)" ); + registerColumnType( Types.TINYINT, "number(3,0)" ); + registerColumnType( Types.INTEGER, "number(10,0)" ); + registerColumnType( Types.CHAR, "char(1 char)" ); + registerColumnType( Types.VARCHAR, 4000, "varchar2($l char)" ); + registerColumnType( Types.VARCHAR, "long" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double precision" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "date" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, 2000, "raw($l)" ); + registerColumnType( Types.VARBINARY, "long raw" ); + registerColumnType( Types.NUMERIC, "number($p,$s)" ); + registerColumnType( Types.DECIMAL, "number($p,$s)" ); + registerColumnType( Types.BLOB, "blob" ); + registerColumnType( Types.CLOB, "clob" ); + + // Oracle driver reports to support getGeneratedKeys(), but they only + // support the version taking an array of the names of the columns to + // be returned (via its RETURNING clause). No other driver seems to + // support this overloaded version. 
+ getDefaultProperties().setProperty(Environment.USE_GET_GENERATED_KEYS, "false"); + getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true"); + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE); + + registerFunction( "abs", new StandardSQLFunction("abs") ); + registerFunction( "sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + + registerFunction( "acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) ); + registerFunction( "asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) ); + registerFunction( "atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) ); + registerFunction( "cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) ); + registerFunction( "cosh", new StandardSQLFunction("cosh", Hibernate.DOUBLE) ); + registerFunction( "exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) ); + registerFunction( "ln", new StandardSQLFunction("ln", Hibernate.DOUBLE) ); + registerFunction( "sin", new StandardSQLFunction("sin", Hibernate.DOUBLE) ); + registerFunction( "sinh", new StandardSQLFunction("sinh", Hibernate.DOUBLE) ); + registerFunction( "stddev", new StandardSQLFunction("stddev", Hibernate.DOUBLE) ); + registerFunction( "sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction( "tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) ); + registerFunction( "tanh", new StandardSQLFunction("tanh", Hibernate.DOUBLE) ); + registerFunction( "variance", new StandardSQLFunction("variance", Hibernate.DOUBLE) ); + + registerFunction( "round", new StandardSQLFunction("round") ); + registerFunction( "trunc", new StandardSQLFunction("trunc") ); + registerFunction( "ceil", new StandardSQLFunction("ceil") ); + registerFunction( "floor", new StandardSQLFunction("floor") ); + + registerFunction( "chr", new StandardSQLFunction("chr", Hibernate.CHARACTER) ); + registerFunction( "initcap", new StandardSQLFunction("initcap") ); + registerFunction( "lower", new StandardSQLFunction("lower") ); + registerFunction( "ltrim", new StandardSQLFunction("ltrim") ); + registerFunction( "rtrim", new StandardSQLFunction("rtrim") ); + registerFunction( "soundex", new StandardSQLFunction("soundex") ); + registerFunction( "upper", new StandardSQLFunction("upper") ); + registerFunction( "ascii", new StandardSQLFunction("ascii", Hibernate.INTEGER) ); + registerFunction( "length", new StandardSQLFunction("length", Hibernate.LONG) ); + + registerFunction( "to_char", new StandardSQLFunction("to_char", Hibernate.STRING) ); + registerFunction( "to_date", new StandardSQLFunction("to_date", Hibernate.TIMESTAMP) ); + + registerFunction( "current_date", new NoArgSQLFunction("current_date", Hibernate.DATE, false) ); + registerFunction( "current_time", new NoArgSQLFunction("current_timestamp", Hibernate.TIME, false) ); + registerFunction( "current_timestamp", new NoArgSQLFunction("current_timestamp", Hibernate.TIMESTAMP, false) ); + + registerFunction( "lastday", new StandardSQLFunction("lastday", Hibernate.DATE) ); + registerFunction( "sysdate", new NoArgSQLFunction("sysdate", Hibernate.DATE, false) ); + registerFunction( "systimestamp", new NoArgSQLFunction("systimestamp", Hibernate.TIMESTAMP, false) ); + registerFunction( "uid", new NoArgSQLFunction("uid", Hibernate.INTEGER, false) ); + registerFunction( "user", new NoArgSQLFunction("user", Hibernate.STRING, false) ); + + registerFunction( "rowid", new NoArgSQLFunction("rowid", Hibernate.LONG, false) ); + registerFunction( "rownum", new NoArgSQLFunction("rownum", 
Hibernate.LONG, false) ); + + // Multi-param string dialect functions... + registerFunction( "concat", new VarArgsSQLFunction(Hibernate.STRING, "", "||", "") ); + registerFunction( "instr", new StandardSQLFunction("instr", Hibernate.INTEGER) ); + registerFunction( "instrb", new StandardSQLFunction("instrb", Hibernate.INTEGER) ); + registerFunction( "lpad", new StandardSQLFunction("lpad", Hibernate.STRING) ); + registerFunction( "replace", new StandardSQLFunction("replace", Hibernate.STRING) ); + registerFunction( "rpad", new StandardSQLFunction("rpad", Hibernate.STRING) ); + registerFunction( "substr", new StandardSQLFunction("substr", Hibernate.STRING) ); + registerFunction( "substrb", new StandardSQLFunction("substrb", Hibernate.STRING) ); + registerFunction( "translate", new StandardSQLFunction("translate", Hibernate.STRING) ); + + registerFunction( "substring", new StandardSQLFunction( "substr", Hibernate.STRING ) ); + registerFunction( "locate", new SQLFunctionTemplate( Hibernate.INTEGER, "instr(?2,?1)" ) ); + registerFunction( "bit_length", new SQLFunctionTemplate( Hibernate.INTEGER, "vsize(?1)*8" ) ); + registerFunction( "coalesce", new NvlFunction() ); + + // Multi-param numeric dialect functions... + registerFunction( "atan2", new StandardSQLFunction("atan2", Hibernate.FLOAT) ); + registerFunction( "log", new StandardSQLFunction("log", Hibernate.INTEGER) ); + registerFunction( "mod", new StandardSQLFunction("mod", Hibernate.INTEGER) ); + registerFunction( "nvl", new StandardSQLFunction("nvl") ); + registerFunction( "nvl2", new StandardSQLFunction("nvl2") ); + registerFunction( "power", new StandardSQLFunction("power", Hibernate.FLOAT) ); + + // Multi-param date dialect functions... + registerFunction( "add_months", new StandardSQLFunction("add_months", Hibernate.DATE) ); + registerFunction( "months_between", new StandardSQLFunction("months_between", Hibernate.FLOAT) ); + registerFunction( "next_day", new StandardSQLFunction("next_day", Hibernate.DATE) ); + + registerFunction( "str", new StandardSQLFunction("to_char", Hibernate.STRING) ); + } + + public String getAddColumnString() { + return "add"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select " + getSelectSequenceNextValString( sequenceName ) + " from dual"; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return sequenceName + ".nextval"; + } + + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; //starts with 1, implicitly + } + + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName; + } + + public String getCascadeConstraintsString() { + return " cascade constraints"; + } + + public boolean dropConstraints() { + return false; + } + + public String getForUpdateNowaitString() { + return " for update nowait"; + } + + public boolean supportsSequences() { + return true; + } + + public boolean supportsPooledSequences() { + return true; + } + + public boolean supportsLimit() { + return true; + } + + public String getLimitString(String sql, boolean hasOffset) { + + sql = sql.trim(); + boolean isForUpdate = false; + if ( sql.toLowerCase().endsWith(" for update") ) { + sql = sql.substring( 0, sql.length()-11 ); + isForUpdate = true; + } + + StringBuffer pagingSelect = new StringBuffer( sql.length()+100 ); + if (hasOffset) { + pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( "); + } + else { + pagingSelect.append("select * from ( "); + } + 
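+ // At this point, for a query with an offset, the buffer holds the prefix
+ //   select * from ( select row_.*, rownum rownum_ from (
+ // The original query is appended next and closed with
+ //   ) row_ where rownum <= ?) where rownum_ > ?
+ // The inner rownum bound is the maximum row number (offset + limit), which is
+ // why useMaxForLimit() returns true below, and the two parameters are bound
+ // max-first because bindLimitParametersInReverseOrder() returns true.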
pagingSelect.append(sql); + if (hasOffset) { + pagingSelect.append(" ) row_ where rownum <= ?) where rownum_ > ?"); + } + else { + pagingSelect.append(" ) where rownum <= ?"); + } + + if ( isForUpdate ) { + pagingSelect.append( " for update" ); + } + + return pagingSelect.toString(); + } + + public String getForUpdateString(String aliases) { + return getForUpdateString() + " of " + aliases; + } + + public String getForUpdateNowaitString(String aliases) { + return getForUpdateString() + " of " + aliases + " nowait"; + } + + public boolean bindLimitParametersInReverseOrder() { + return true; + } + + public boolean useMaxForLimit() { + return true; + } + + public boolean forUpdateOfColumns() { + return true; + } + + public String getQuerySequencesString() { + return "select sequence_name from user_sequences"; + } + + public String getSelectGUIDString() { + return "select rawtohex(sys_guid()) from dual"; + } + + public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { + return EXTRACTER; + } + + private static ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() { + + /** + * Extract the name of the violated constraint from the given SQLException. + * + * @param sqle The exception that was the result of the constraint violation. + * @return The extracted constraint name. + */ + public String extractConstraintName(SQLException sqle) { + int errorCode = JDBCExceptionHelper.extractErrorCode(sqle); + if ( errorCode == 1 || errorCode == 2291 || errorCode == 2292 ) { + return extractUsingTemplate( "constraint (", ") violated", sqle.getMessage() ); + } + else if ( errorCode == 1400 ) { + // simple nullability constraint + return null; + } + else { + return null; + } + } + + }; + + // not final-static to avoid possible classcast exceptions if using different oracle drivers. + int oracletypes_cursor_value = 0; + public int registerResultSetOutParameter(java.sql.CallableStatement statement,int col) throws SQLException { + if(oracletypes_cursor_value==0) { + try { + Class types = ReflectHelper.classForName("oracle.jdbc.driver.OracleTypes"); + oracletypes_cursor_value = types.getField("CURSOR").getInt(types.newInstance()); + } catch (Exception se) { + throw new HibernateException("Problem while trying to load or access OracleTypes.CURSOR value",se); + } + } + // register the type of the out param - an Oracle specific type + statement.registerOutParameter(col, oracletypes_cursor_value); + col++; + return col; + } + + public ResultSet getResultSet(CallableStatement ps) throws SQLException { + ps.execute(); + return ( ResultSet ) ps.getObject( 1 ); + } + + public boolean supportsUnionAll() { + return true; + } + + public boolean supportsCommentOn() { + return true; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String generateTemporaryTableName(String baseTableName) { + String name = super.generateTemporaryTableName(baseTableName); + return name.length() > 30 ? 
name.substring( 1, 30 ) : name; + } + + public String getCreateTemporaryTableString() { + return "create global temporary table"; + } + + public String getCreateTemporaryTablePostfix() { + return "on commit delete rows"; + } + + public boolean dropTemporaryTableAfterUse() { + return false; + } + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public String getCurrentTimestampSelectString() { + return "select systimestamp from dual"; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } + + public boolean supportsExistsInSelect() { + return false; + } +} diff --git a/src/org/hibernate/dialect/Oracle9iDialect.java b/src/org/hibernate/dialect/Oracle9iDialect.java new file mode 100644 index 0000000000..25f0626ffe --- /dev/null +++ b/src/org/hibernate/dialect/Oracle9iDialect.java @@ -0,0 +1,77 @@ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.ANSICaseFragment; + +/** + * A dialect for Oracle 9i databases. + *
<p/>
+ * Unlike the older (deprecated) {@link Oracle9Dialect}, this version specifies + * not to use "ANSI join syntax" because 9i does not seem to properly + * handle it in all cases. + * + * @author Steve Ebersole + */ +public class Oracle9iDialect extends Oracle8iDialect { + protected void registerCharacterTypeMappings() { + registerColumnType( Types.CHAR, "char(1 char)" ); + registerColumnType( Types.VARCHAR, 4000, "varchar2($l char)" ); + registerColumnType( Types.VARCHAR, "long" ); + } + + protected void registerDateTimeTypeMappings() { + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "date" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + } + + public CaseFragment createCaseFragment() { + // Oracle did add support for ANSI CASE statements in 9i + return new ANSICaseFragment(); + } + + public String getLimitString(String sql, boolean hasOffset) { + sql = sql.trim(); + boolean isForUpdate = false; + if ( sql.toLowerCase().endsWith(" for update") ) { + sql = sql.substring( 0, sql.length()-11 ); + isForUpdate = true; + } + + StringBuffer pagingSelect = new StringBuffer( sql.length()+100 ); + if (hasOffset) { + pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( "); + } + else { + pagingSelect.append("select * from ( "); + } + pagingSelect.append(sql); + if (hasOffset) { + pagingSelect.append(" ) row_ where rownum <= ?) where rownum_ > ?"); + } + else { + pagingSelect.append(" ) where rownum <= ?"); + } + + if ( isForUpdate ) { + pagingSelect.append( " for update" ); + } + + return pagingSelect.toString(); + } + + public String getSelectClauseNullString(int sqlType) { + return getBasicSelectClauseNullString( sqlType ); + } + + public String getCurrentTimestampSelectString() { + return "select systimestamp from dual"; + } + + public String getCurrentTimestampSQLFunctionName() { + // the standard SQL function name is current_timestamp... + return "current_timestamp"; + } +} \ No newline at end of file diff --git a/src/org/hibernate/dialect/OracleDialect.java b/src/org/hibernate/dialect/OracleDialect.java new file mode 100644 index 0000000000..e12232a2f9 --- /dev/null +++ b/src/org/hibernate/dialect/OracleDialect.java @@ -0,0 +1,93 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.DecodeCaseFragment; +import org.hibernate.sql.JoinFragment; +import org.hibernate.sql.OracleJoinFragment; + +/** + * An SQL dialect for Oracle, compatible with Oracle 8. + * + * @deprecated Use Oracle8iDialect instead.
+ * @author Gavin King + */ +public class OracleDialect extends Oracle9Dialect { + + private static final Log log = LogFactory.getLog( OracleDialect.class ); + + public OracleDialect() { + super(); + log.warn( "The OracleDialect dialect has been deprecated; use Oracle8iDialect instead" ); + // Oracle8 and previous define only a "DATE" type which + // is used to represent all aspects of date/time + registerColumnType( Types.TIMESTAMP, "date" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, 4000, "varchar2($l)" ); + } + + public JoinFragment createOuterJoinFragment() { + return new OracleJoinFragment(); + } + public CaseFragment createCaseFragment() { + return new DecodeCaseFragment(); + } + + public String getLimitString(String sql, boolean hasOffset) { + + sql = sql.trim(); + boolean isForUpdate = false; + if ( sql.toLowerCase().endsWith(" for update") ) { + sql = sql.substring( 0, sql.length()-11 ); + isForUpdate = true; + } + + StringBuffer pagingSelect = new StringBuffer( sql.length()+100 ); + if (hasOffset) { + pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( "); + } + else { + pagingSelect.append("select * from ( "); + } + pagingSelect.append(sql); + if (hasOffset) { + pagingSelect.append(" ) row_ ) where rownum_ <= ? and rownum_ > ?"); + } + else { + pagingSelect.append(" ) where rownum <= ?"); + } + + if ( isForUpdate ) { + pagingSelect.append( " for update" ); + } + + return pagingSelect.toString(); + } + + public String getSelectClauseNullString(int sqlType) { + switch(sqlType) { + case Types.VARCHAR: + case Types.CHAR: + return "to_char(null)"; + case Types.DATE: + case Types.TIMESTAMP: + case Types.TIME: + return "to_date(null)"; + default: + return "to_number(null)"; + } + } + + public String getCurrentTimestampSelectString() { + return "select sysdate from dual"; + } + + public String getCurrentTimestampSQLFunctionName() { + return "sysdate"; + } +} diff --git a/src/org/hibernate/dialect/PointbaseDialect.java b/src/org/hibernate/dialect/PointbaseDialect.java new file mode 100644 index 0000000000..d2378366be --- /dev/null +++ b/src/org/hibernate/dialect/PointbaseDialect.java @@ -0,0 +1,69 @@ +//$Id$ +//Created on 04 February 2002, 17:35 +package org.hibernate.dialect; + +import org.hibernate.dialect.lock.LockingStrategy; +import org.hibernate.dialect.lock.UpdateLockingStrategy; +import org.hibernate.dialect.lock.SelectLockingStrategy; +import org.hibernate.persister.entity.Lockable; +import org.hibernate.LockMode; + +import java.sql.Types; + +/** + * A Dialect for Pointbase. + * @author Ed Mackenzie + */ +public class PointbaseDialect extends org.hibernate.dialect.Dialect { + + /** + * Creates new PointbaseDialect + */ + public PointbaseDialect() { + super(); + registerColumnType( Types.BIT, "smallint" ); //no pointbase BIT + registerColumnType( Types.BIGINT, "bigint" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "smallint" ); //no pointbase TINYINT + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.DOUBLE, "double precision" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + //the BLOB type requires a size argument - this defaults to + //bytes - no arg defaults to 1 whole byte!
+ //other argument mods include K - kilobyte, M - megabyte, G - gigabyte. + //refer to the PBdevelopers guide for more info. + registerColumnType( Types.VARBINARY, "blob($l)" ); + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + } + + public String getAddColumnString() { + return "add"; + } + + public boolean dropConstraints() { + return false; + } + + public String getCascadeConstraintsString() { + return " cascade"; + } + + public String getForUpdateString() { + return ""; + } + + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + // Pointbase has no known variation of a "SELECT ... FOR UPDATE" syntax... + if ( lockMode.greaterThan( LockMode.READ ) ) { + return new UpdateLockingStrategy( lockable, lockMode ); + } + else { + return new SelectLockingStrategy( lockable, lockMode ); + } + } +} diff --git a/src/org/hibernate/dialect/PostgreSQLDialect.java b/src/org/hibernate/dialect/PostgreSQLDialect.java new file mode 100644 index 0000000000..e3fb4e6a68 --- /dev/null +++ b/src/org/hibernate/dialect/PostgreSQLDialect.java @@ -0,0 +1,312 @@ +//$Id$ +package org.hibernate.dialect; + +import java.sql.Types; +import java.sql.SQLException; + +import org.hibernate.Hibernate; +import org.hibernate.exception.ViolatedConstraintNameExtracter; +import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.PositionSubstringFunction; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.id.SequenceGenerator; + +/** + * An SQL dialect for Postgres + * @author Gavin King + */ +public class PostgreSQLDialect extends Dialect { + + public PostgreSQLDialect() { + super(); + registerColumnType( Types.BIT, "bool" ); + registerColumnType( Types.BIGINT, "int8" ); + registerColumnType( Types.SMALLINT, "int2" ); + registerColumnType( Types.TINYINT, "int2" ); + registerColumnType( Types.INTEGER, "int4" ); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "float4" ); + registerColumnType( Types.DOUBLE, "float8" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, "bytea" ); + registerColumnType( Types.CLOB, "text" ); + registerColumnType( Types.BLOB, "oid" ); + registerColumnType( Types.NUMERIC, "numeric($p, $s)" ); + + registerFunction( "abs", new StandardSQLFunction("abs") ); + registerFunction( "sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + + registerFunction( "acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) ); + registerFunction( "asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) ); + registerFunction( "atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) ); + registerFunction( "cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) ); + registerFunction( "cot", new StandardSQLFunction("cot", Hibernate.DOUBLE) ); + registerFunction( "exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) ); + registerFunction( "ln", new StandardSQLFunction("ln", Hibernate.DOUBLE) ); + registerFunction( "log", new StandardSQLFunction("log", Hibernate.DOUBLE) ); + registerFunction( "sin", new 
StandardSQLFunction("sin", Hibernate.DOUBLE) ); + registerFunction( "sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction( "cbrt", new StandardSQLFunction("cbrt", Hibernate.DOUBLE) ); + registerFunction( "tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) ); + registerFunction( "radians", new StandardSQLFunction("radians", Hibernate.DOUBLE) ); + registerFunction( "degrees", new StandardSQLFunction("degrees", Hibernate.DOUBLE) ); + + registerFunction( "stddev", new StandardSQLFunction("stddev", Hibernate.DOUBLE) ); + registerFunction( "variance", new StandardSQLFunction("variance", Hibernate.DOUBLE) ); + + registerFunction( "random", new NoArgSQLFunction("random", Hibernate.DOUBLE) ); + + registerFunction( "round", new StandardSQLFunction("round") ); + registerFunction( "trunc", new StandardSQLFunction("trunc") ); + registerFunction( "ceil", new StandardSQLFunction("ceil") ); + registerFunction( "floor", new StandardSQLFunction("floor") ); + + registerFunction( "chr", new StandardSQLFunction("chr", Hibernate.CHARACTER) ); + registerFunction( "lower", new StandardSQLFunction("lower") ); + registerFunction( "upper", new StandardSQLFunction("upper") ); + registerFunction( "substr", new StandardSQLFunction("substr", Hibernate.STRING) ); + registerFunction( "initcap", new StandardSQLFunction("initcap") ); + registerFunction( "to_ascii", new StandardSQLFunction("to_ascii") ); + registerFunction( "quote_ident", new StandardSQLFunction("quote_ident", Hibernate.STRING) ); + registerFunction( "quote_literal", new StandardSQLFunction("quote_literal", Hibernate.STRING) ); + registerFunction( "md5", new StandardSQLFunction("md5") ); + registerFunction( "ascii", new StandardSQLFunction("ascii", Hibernate.INTEGER) ); + registerFunction( "length", new StandardSQLFunction("length", Hibernate.LONG) ); + registerFunction( "char_length", new StandardSQLFunction("char_length", Hibernate.LONG) ); + registerFunction( "bit_length", new StandardSQLFunction("bit_length", Hibernate.LONG) ); + registerFunction( "octet_length", new StandardSQLFunction("octet_length", Hibernate.LONG) ); + + registerFunction( "current_date", new NoArgSQLFunction("current_date", Hibernate.DATE, false) ); + registerFunction( "current_time", new NoArgSQLFunction("current_time", Hibernate.TIME, false) ); + registerFunction( "current_timestamp", new NoArgSQLFunction("current_timestamp", Hibernate.TIMESTAMP, false) ); + registerFunction( "localtime", new NoArgSQLFunction("localtime", Hibernate.TIME, false) ); + registerFunction( "localtimestamp", new NoArgSQLFunction("localtimestamp", Hibernate.TIMESTAMP, false) ); + registerFunction( "now", new NoArgSQLFunction("now", Hibernate.TIMESTAMP) ); + registerFunction( "timeofday", new NoArgSQLFunction("timeofday", Hibernate.STRING) ); + registerFunction( "age", new StandardSQLFunction("age") ); + + registerFunction( "current_user", new NoArgSQLFunction("current_user", Hibernate.STRING, false) ); + registerFunction( "session_user", new NoArgSQLFunction("session_user", Hibernate.STRING, false) ); + registerFunction( "user", new NoArgSQLFunction("user", Hibernate.STRING, false) ); + registerFunction( "current_database", new NoArgSQLFunction("current_database", Hibernate.STRING, true) ); + registerFunction( "current_schema", new NoArgSQLFunction("current_schema", Hibernate.STRING, true) ); + + registerFunction( "to_char", new StandardSQLFunction("to_char", Hibernate.STRING) ); + registerFunction( "to_date", new StandardSQLFunction("to_date", Hibernate.DATE) ); + 
registerFunction( "to_timestamp", new StandardSQLFunction("to_timestamp", Hibernate.TIMESTAMP) ); + registerFunction( "to_number", new StandardSQLFunction("to_number", Hibernate.BIG_DECIMAL) ); + + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(","||",")" ) ); + + registerFunction( "locate", new PositionSubstringFunction() ); + + registerFunction( "str", new SQLFunctionTemplate(Hibernate.STRING, "cast(?1 as varchar)") ); + + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE); + } + + public String getAddColumnString() { + return "add column"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select " + getSelectSequenceNextValString( sequenceName ); + } + + public String getSelectSequenceNextValString(String sequenceName) { + return "nextval ('" + sequenceName + "')"; + } + + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; //starts with 1, implicitly + } + + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName; + } + + public String getCascadeConstraintsString() { + return "";//" cascade"; + } + public boolean dropConstraints() { + return true; + } + + public boolean supportsSequences() { + return true; + } + + public String getQuerySequencesString() { + return "select relname from pg_class where relkind='S'"; + } + + public boolean supportsLimit() { + return true; + } + + public String getLimitString(String sql, boolean hasOffset) { + return new StringBuffer( sql.length()+20 ) + .append(sql) + .append(hasOffset ? " limit ? offset ?" : " limit ?") + .toString(); + } + + public boolean bindLimitParametersInReverseOrder() { + return true; + } + + public boolean supportsIdentityColumns() { + return true; + } + + public String getForUpdateString(String aliases) { + return getForUpdateString() + " of " + aliases; + } + + public String getIdentitySelectString(String table, String column, int type) { + return new StringBuffer().append("select currval('") + .append(table) + .append('_') + .append(column) + .append("_seq')") + .toString(); + } + + public String getIdentityColumnString(int type) { + return type==Types.BIGINT ? 
+ "bigserial not null" : + "serial not null"; + } + + public boolean hasDataTypeInIdentityColumn() { + return false; + } + + public String getNoColumnsInsertString() { + return "default values"; + } + + public Class getNativeIdentifierGeneratorClass() { + return SequenceGenerator.class; + } + + public boolean supportsOuterJoinForUpdate() { + return false; + } + + public boolean useInputStreamToInsertBlob() { + return false; + } + + public boolean supportsUnionAll() { + return true; + } + + /** + * Workaround for postgres bug #1453 + */ + public String getSelectClauseNullString(int sqlType) { + String typeName = getTypeName(sqlType, 1, 1, 0); + //trim off the length/precision/scale + int loc = typeName.indexOf('('); + if (loc>-1) { + typeName = typeName.substring(0, loc); + } + return "null::" + typeName; + } + + public boolean supportsCommentOn() { + return true; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String getCreateTemporaryTableString() { + return "create local temporary table"; + } + + public String getCreateTemporaryTablePostfix() { + return "on commit drop"; + } + + /*public boolean dropTemporaryTableAfterUse() { + //we have to, because postgres sets current tx + //to rollback only after a failed create table + return true; + }*/ + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + public String getCurrentTimestampSelectString() { + return "select now()"; + } + + public String toBooleanValueString(boolean bool) { + return bool ? "true" : "false"; + } + + public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { + return EXTRACTER; + } + + /** + * Constraint-name extractor for Postgres contraint violation exceptions. + * Orginally contributed by Denny Bartelt. + */ + private static ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() { + public String extractConstraintName(SQLException sqle) { + try { + int sqlState = Integer.valueOf( JDBCExceptionHelper.extractSqlState(sqle)).intValue(); + switch (sqlState) { + // CHECK VIOLATION + case 23514: return extractUsingTemplate("violates check constraint \"","\"", sqle.getMessage()); + // UNIQUE VIOLATION + case 23505: return extractUsingTemplate("violates unique constraint \"","\"", sqle.getMessage()); + // FOREIGN KEY VIOLATION + case 23503: return extractUsingTemplate("violates foreign key constraint \"","\"", sqle.getMessage()); + // NOT NULL VIOLATION + case 23502: return extractUsingTemplate("null value in column \"","\" violates not-null constraint", sqle.getMessage()); + // TODO: RESTRICT VIOLATION + case 23001: return null; + // ALL OTHER + default: return null; + } + } catch (NumberFormatException nfe) { + return null; + } + } + }; + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// seems to not really... 
+// public boolean supportsRowValueConstructorSyntax() { +// return true; +// } + + public boolean supportsEmptyInList() { + return false; + } + + public boolean supportsExpectedLobUsagePattern() { + // PostgreSQL seems to have spotty LOB support + return false; + } +} diff --git a/src/org/hibernate/dialect/ProgressDialect.java b/src/org/hibernate/dialect/ProgressDialect.java new file mode 100644 index 0000000000..d3c1344f5d --- /dev/null +++ b/src/org/hibernate/dialect/ProgressDialect.java @@ -0,0 +1,60 @@ +//$Id$ +// contributed by Phillip Baird +package org.hibernate.dialect; + +import java.sql.Types; + +/** + * An SQL dialect compatible with Progress 9.1C<br>
+ * <br>
+ * Connection Parameters required:
+ * <ul>
+ * <li>hibernate.dialect org.hibernate.dialect.ProgressDialect
+ * <li>hibernate.driver com.progress.sql.jdbc.JdbcProgressDriver
+ * <li>hibernate.url jdbc:JdbcProgress:T:host:port:dbname;WorkArounds=536870912
+ * <li>hibernate.username username
+ * <li>hibernate.password password
+ * </ul>
+ * The WorkArounds parameter in the URL is required to avoid an error + * in the Progress 9.1C JDBC driver related to PreparedStatements. + * @author Phillip Baird + * + */ +public class ProgressDialect extends Dialect { + public ProgressDialect() { + super(); + registerColumnType( Types.BIT, "bit" ); + registerColumnType( Types.BIGINT, "numeric" ); + registerColumnType( Types.SMALLINT, "smallint" ); + registerColumnType( Types.TINYINT, "tinyint" ); + registerColumnType( Types.INTEGER, "integer" ); + registerColumnType( Types.CHAR, "character(1)" ); + registerColumnType( Types.VARCHAR, "varchar($l)" ); + registerColumnType( Types.FLOAT, "real" ); + registerColumnType( Types.DOUBLE, "double precision" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIMESTAMP, "timestamp" ); + registerColumnType( Types.VARBINARY, "varbinary($l)" ); + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + } + + public boolean hasAlterTable(){ + return false; + } + + public String getAddColumnString() { + return "add column"; + } + + public boolean qualifyIndexName() { + return false; + } +} + + + + + + + diff --git a/src/org/hibernate/dialect/RDMSOS2200Dialect.java b/src/org/hibernate/dialect/RDMSOS2200Dialect.java new file mode 100755 index 0000000000..7e0e9974c5 --- /dev/null +++ b/src/org/hibernate/dialect/RDMSOS2200Dialect.java @@ -0,0 +1,322 @@ +/* + * Created on Aug 24, 2005 + * This is the Hibernate dialect for the Unisys 2200 Relational Database (RDMS). + * This dialect was developed for use with Hibernate 3.0.5. Other versions may + * require modifications to the dialect. + * + * Version History: + * Also change the version displayed below in the constructor + * 1.1 + * 1.0 2005-10-24 CDH - First dated version for use with CP 11 + */ +package org.hibernate.dialect; + +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.lock.LockingStrategy; +import org.hibernate.dialect.lock.UpdateLockingStrategy; +import org.hibernate.dialect.lock.SelectLockingStrategy; + +import java.sql.Types; +import org.hibernate.Hibernate; +import org.hibernate.LockMode; +import org.hibernate.persister.entity.Lockable; +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.DecodeCaseFragment; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * @author Ploski and Hanson + */ +public class RDMSOS2200Dialect extends Dialect { + private static Log log = LogFactory.getLog(RDMSOS2200Dialect.class); + + public RDMSOS2200Dialect() { + super(); + // Display the dialect version. + log.info("RDMSOS2200Dialect version: 1.0"); + + /** + * This section registers RDMS Built-in Functions (BIFs) with Hibernate. + * The first parameter is the name under which the function is registered + * with Hibernate. The second parameter is the defined RDMS SQL Function + * and its characteristics. If StandardSQLFunction(...) is used, the RDMS BIF + * name and the return type (if any) is specified. If + * SQLFunctionTemplate(...) is used, the return type and a template + * string are provided, plus an optional hasParenthesesIfNoArgs flag.
+ */ + registerFunction( "abs", new StandardSQLFunction("abs") ); + registerFunction( "sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + + registerFunction("ascii", new StandardSQLFunction("ascii", Hibernate.INTEGER) ); + registerFunction("char_length", new StandardSQLFunction("char_length", Hibernate.INTEGER) ); + registerFunction("character_length", new StandardSQLFunction("character_length", Hibernate.INTEGER) ); + registerFunction("length", new StandardSQLFunction("length", Hibernate.INTEGER) ); + + // The RDMS concat() function only supports 2 parameters + registerFunction( "concat", new SQLFunctionTemplate(Hibernate.STRING, "concat(?1, ?2)") ); + registerFunction( "instr", new StandardSQLFunction("instr", Hibernate.STRING) ); + registerFunction( "lpad", new StandardSQLFunction("lpad", Hibernate.STRING) ); + registerFunction( "replace", new StandardSQLFunction("replace", Hibernate.STRING) ); + registerFunction( "rpad", new StandardSQLFunction("rpad", Hibernate.STRING) ); + registerFunction( "substr", new StandardSQLFunction("substr", Hibernate.STRING) ); + + registerFunction("lcase", new StandardSQLFunction("lcase") ); + registerFunction("lower", new StandardSQLFunction("lower") ); + registerFunction("ltrim", new StandardSQLFunction("ltrim") ); + registerFunction("reverse", new StandardSQLFunction("reverse") ); + registerFunction("rtrim", new StandardSQLFunction("rtrim") ); + + // RDMS does not directly support the trim() function, we use rtrim() and ltrim() + registerFunction("trim", new SQLFunctionTemplate(Hibernate.INTEGER, "ltrim(rtrim(?1))" ) ); + registerFunction("soundex", new StandardSQLFunction("soundex") ); + registerFunction("space", new StandardSQLFunction("space", Hibernate.STRING) ); + registerFunction("ucase", new StandardSQLFunction("ucase") ); + registerFunction("upper", new StandardSQLFunction("upper") ); + + registerFunction("acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) ); + registerFunction("asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) ); + registerFunction("atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) ); + registerFunction("cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) ); + registerFunction("cosh", new StandardSQLFunction("cosh", Hibernate.DOUBLE) ); + registerFunction("cot", new StandardSQLFunction("cot", Hibernate.DOUBLE) ); + registerFunction("exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) ); + registerFunction("ln", new StandardSQLFunction("ln", Hibernate.DOUBLE) ); + registerFunction("log", new StandardSQLFunction("log", Hibernate.DOUBLE) ); + registerFunction("log10", new StandardSQLFunction("log10", Hibernate.DOUBLE) ); + registerFunction("pi", new NoArgSQLFunction("pi", Hibernate.DOUBLE) ); + registerFunction("rand", new NoArgSQLFunction("rand", Hibernate.DOUBLE) ); + registerFunction("sin", new StandardSQLFunction("sin", Hibernate.DOUBLE) ); + registerFunction("sinh", new StandardSQLFunction("sinh", Hibernate.DOUBLE) ); + registerFunction("sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction("tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) ); + registerFunction("tanh", new StandardSQLFunction("tanh", Hibernate.DOUBLE) ); + + registerFunction( "round", new StandardSQLFunction("round") ); + registerFunction( "trunc", new StandardSQLFunction("trunc") ); + registerFunction( "ceil", new StandardSQLFunction("ceil") ); + registerFunction( "floor", new StandardSQLFunction("floor") ); + + registerFunction( "chr", new StandardSQLFunction("chr", Hibernate.CHARACTER) ); 
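Several registrations above rely on SQLFunctionTemplate's ?n placeholders ("concat(?1, ?2)" and "ltrim(rtrim(?1))" here, "instr(?2,?1)" in the Oracle dialects): ?1 stands for the first rendered argument, ?2 for the second, and so on. A standalone sketch of that substitution idea, illustrative only and not Hibernate's implementation:

import java.util.Arrays;
import java.util.List;

public class TemplateRenderSketch {

    // Replace each ?1, ?2, ... placeholder with the corresponding argument.
    static String render(String template, List args) {
        String sql = template;
        for (int i = 0; i < args.size(); i++) {
            String marker = "?" + (i + 1);
            String arg = (String) args.get(i);
            int at = sql.indexOf(marker);
            while (at >= 0) {
                sql = sql.substring(0, at) + arg + sql.substring(at + marker.length());
                at = sql.indexOf(marker, at + arg.length());
            }
        }
        return sql;
    }

    public static void main(String[] argv) {
        // prints: ltrim(rtrim(some_col)) -- RDMS has no single trim() function
        System.out.println(render("ltrim(rtrim(?1))", Arrays.asList(new String[] { "some_col" })));
        // prints: instr(hay,needle) -- how the Oracle dialects emulate locate()
        System.out.println(render("instr(?2,?1)", Arrays.asList(new String[] { "needle", "hay" })));
    }
}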
+		registerFunction( "initcap", new StandardSQLFunction("initcap") );
+
+		registerFunction( "user", new NoArgSQLFunction("user", Hibernate.STRING, false) );
+
+		registerFunction( "current_date", new NoArgSQLFunction("current_date", Hibernate.DATE, false) );
+		registerFunction( "current_time", new NoArgSQLFunction("current_timestamp", Hibernate.TIME, false) );
+		registerFunction( "current_timestamp", new NoArgSQLFunction("current_timestamp", Hibernate.TIMESTAMP, false) );
+		registerFunction("curdate", new NoArgSQLFunction("curdate",Hibernate.DATE) );
+		registerFunction("curtime", new NoArgSQLFunction("curtime",Hibernate.TIME) );
+		registerFunction("days", new StandardSQLFunction("days",Hibernate.INTEGER) );
+		registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth",Hibernate.INTEGER) );
+		registerFunction("dayname", new StandardSQLFunction("dayname",Hibernate.STRING) );
+		registerFunction("dayofweek", new StandardSQLFunction("dayofweek",Hibernate.INTEGER) );
+		registerFunction("dayofyear", new StandardSQLFunction("dayofyear",Hibernate.INTEGER) );
+		registerFunction("hour", new StandardSQLFunction("hour",Hibernate.INTEGER) );
+		registerFunction("last_day", new StandardSQLFunction("last_day",Hibernate.DATE) );
+		registerFunction("microsecond", new StandardSQLFunction("microsecond",Hibernate.INTEGER) );
+		registerFunction("minute", new StandardSQLFunction("minute",Hibernate.INTEGER) );
+		registerFunction("month", new StandardSQLFunction("month",Hibernate.INTEGER) );
+		registerFunction("monthname", new StandardSQLFunction("monthname",Hibernate.STRING) );
+		registerFunction("now", new NoArgSQLFunction("now",Hibernate.TIMESTAMP) );
+		registerFunction("quarter", new StandardSQLFunction("quarter",Hibernate.INTEGER) );
+		registerFunction("second", new StandardSQLFunction("second",Hibernate.INTEGER) );
+		registerFunction("time", new StandardSQLFunction("time",Hibernate.TIME) );
+		registerFunction("timestamp", new StandardSQLFunction("timestamp",Hibernate.TIMESTAMP) );
+		registerFunction("week", new StandardSQLFunction("week",Hibernate.INTEGER) );
+		registerFunction("year", new StandardSQLFunction("year",Hibernate.INTEGER) );
+
+		registerFunction("atan2", new StandardSQLFunction("atan2",Hibernate.DOUBLE) );
+		registerFunction( "mod", new StandardSQLFunction("mod",Hibernate.INTEGER) );
+		registerFunction( "nvl", new StandardSQLFunction("nvl") );
+		registerFunction( "power", new StandardSQLFunction("power", Hibernate.DOUBLE) );
+
+		/**
+		 * For a list of column types to register, see section A-1
+		 * in 7862 7395, the Unisys JDBC manual.
+		 *
+		 * Here are column sizes as documented in Table A-1 of
+		 * 7831 0760, "Enterprise Relational Database Server
+		 * for ClearPath OS2200 Administration Guide"
+		 * Numeric - 21
+		 * Decimal - 22 (21 digits plus one for sign)
+		 * Float   - 60 bits
+		 * Char    - 28000
+		 * NChar   - 14000
+		 * BLOB+   - 4294967296 (4 Gb)
+		 *   + RDMS JDBC driver does not support BLOBs
+		 *
+		 * DATE, TIME and TIMESTAMP literal formats are
+		 * all described in section 2.3.4 DATE Literal Format
+		 * in 7830 8160.
+		 * The DATE literal format is: YYYY-MM-DD
+		 * The TIME literal format is: HH:MM:SS[.[FFFFFF]]
+		 * The TIMESTAMP literal format is: YYYY-MM-DD HH:MM:SS[.[FFFFFF]]
+		 *
+		 * Note that $l (dollar-L) will use the length value if provided.
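+		 * For example, with the registration registerColumnType(Types.VARCHAR, "CHARACTER($l)")
+		 * below, a property mapped with length="20" produces the column type CHARACTER(20).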
+		 * Also new for Hibernate3 are the $p (precision) and $s (scale) parameters
+		 */
+		registerColumnType(Types.BIT, "SMALLINT");
+		registerColumnType(Types.TINYINT, "SMALLINT");
+		registerColumnType(Types.BIGINT, "NUMERIC(21,0)");
+		registerColumnType(Types.SMALLINT, "SMALLINT");
+		registerColumnType(Types.CHAR, "CHARACTER(1)");
+		registerColumnType(Types.DOUBLE, "DOUBLE PRECISION");
+		registerColumnType(Types.FLOAT, "FLOAT");
+		registerColumnType(Types.REAL, "REAL");
+		registerColumnType(Types.INTEGER, "INTEGER");
+		registerColumnType(Types.NUMERIC, "NUMERIC(21,$l)");
+		registerColumnType(Types.DECIMAL, "NUMERIC(21,$l)");
+		registerColumnType(Types.DATE, "DATE");
+		registerColumnType(Types.TIME, "TIME");
+		registerColumnType(Types.TIMESTAMP, "TIMESTAMP");
+		registerColumnType(Types.VARCHAR, "CHARACTER($l)");
+		registerColumnType(Types.BLOB, "BLOB($l)" );
+		/*
+		 * The following types are not supported in RDMS/JDBC and are therefore commented out.
+		 * However, in some cases, mapping them to CHARACTER columns works
+		 * for many applications, though it does not work for all cases.
+		 */
+		// registerColumnType(Types.VARBINARY, "CHARACTER($l)");
+		// registerColumnType(Types.BLOB, "CHARACTER($l)" );  // For use prior to CP 11.0
+		// registerColumnType(Types.CLOB, "CHARACTER($l)" );
+	}
+
+
+	// Dialect method overrides ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+	/**
+	 * RDMS does not support qualifying index names with the schema name.
+	 */
+	public boolean qualifyIndexName() {
+		return false;
+	}
+
+	/**
+	 * The RDMS DB supports the 'FOR UPDATE OF' clause. However, the RDMS-JDBC
+	 * driver does not support this feature, so false is returned.
+	 * The base dialect also returns false, but we keep this override
+	 * in place to make sure it stays false.
+	 */
+	public boolean forUpdateOfColumns() {
+		return false;
+	}
+
+	/**
+	 * Since the RDMS-JDBC driver does not support FOR UPDATE, this string is
+	 * set to an empty string. When the driver does support this feature,
+	 * the returned string should be " FOR UPDATE OF". Note that RDMS does not
+	 * support the plain 'FOR UPDATE' clause.
+	 */
+	public String getForUpdateString() {
+		return ""; // Original Dialect.java returns " for update";
+	}
+
+	/**
+	 * RDMS does not support adding Unique constraints via create and alter table.
+	 */
+	public boolean supportsUniqueConstraintInCreateAlterTable() {
+		return false;
+	}
+
+	// Verify the state of this new method in Hibernate 3.0 Dialect.java
+	/**
+	 * RDMS does not support Cascade Deletes.
+	 * Need to review this in the future when support is provided.
+	 */
+	public boolean supportsCascadeDelete() {
+		return false; // Original Dialect.java returns true;
+	}
+
+	/**
+	 * Currently, RDMS-JDBC does not support FOR UPDATE.
+	 * Need to review this in the future when support is provided.
+	 */
+	public boolean supportsOuterJoinForUpdate() {
+		return false;
+	}
+
+	public String getAddColumnString() {
+		return "add";
+	}
+
+	public String getNullColumnString() {
+		// The keyword used to specify a nullable column.
+		return " null";
+	}
+
+	// *** Sequence methods - start. The RDMS dialect needs these
+	// methods to make it possible to use the Native Id generator
+
+	public boolean supportsSequences() {
+		return true;
+	}
+
+	public String getSequenceNextValString(String sequenceName) {
+		// The where clause was added to eliminate this statement from Brute Force Searches.
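+		// For illustration: when the native id generator needs a new key, Hibernate
+		// executes the statement below and reads the single value produced by
+		// permuted_id('NEXT',31) as the next identifier.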
+ return "select permuted_id('NEXT',31) from rdms.rdms_dummy where key_col = 1 "; + } + + public String getCreateSequenceString(String sequenceName) { + // We must return a valid RDMS/RSA command from this method to + // prevent RDMS/RSA from issuing *ERROR 400 + return ""; + } + + public String getDropSequenceString(String sequenceName) { + // We must return a valid RDMS/RSA command from this method to + // prevent RDMS/RSA from issuing *ERROR 400 + return ""; + } + + // *** Sequence methods - end + + public String getCascadeConstraintsString() { + // Used with DROP TABLE to delete all records in the table. + return " including contents"; + } + + public CaseFragment createCaseFragment() { + return new DecodeCaseFragment(); + } + + public boolean supportsLimit() { + return true; + } + + public boolean supportsLimitOffset() { + return false; + } + + public String getLimitString(String sql, int offset, int limit) { + if (offset>0) throw new UnsupportedOperationException("RDMS does not support paged queries"); + return new StringBuffer(sql.length() + 40) + .append(sql) + .append(" fetch first ") + .append(limit) + .append(" rows only ") + .toString(); + } + + public boolean supportsVariableLimit() { + return false; + } + + public boolean supportsUnionAll() { + // RDMS supports the UNION ALL clause. + return true; + } + + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + // RDMS has no known variation of a "SELECT ... FOR UPDATE" syntax... + if ( lockMode.greaterThan( LockMode.READ ) ) { + return new UpdateLockingStrategy( lockable, lockMode ); + } + else { + return new SelectLockingStrategy( lockable, lockMode ); + } + } +} diff --git a/src/org/hibernate/dialect/SAPDBDialect.java b/src/org/hibernate/dialect/SAPDBDialect.java new file mode 100644 index 0000000000..2c6ad49145 --- /dev/null +++ b/src/org/hibernate/dialect/SAPDBDialect.java @@ -0,0 +1,195 @@ +//$Id$ +// contributed by Brad Clow +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.NoArgSQLFunction; +import org.hibernate.dialect.function.StandardSQLFunction; +import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.DecodeCaseFragment; +import org.hibernate.sql.OracleJoinFragment; +import org.hibernate.sql.JoinFragment; +import org.hibernate.util.StringHelper; + +/** + * An SQL dialect compatible with SAP DB. 
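+ * (SAP DB was subsequently renamed MaxDB.)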
+ * @author Brad Clow
+ */
+public class SAPDBDialect extends Dialect {
+
+	public SAPDBDialect() {
+		super();
+		registerColumnType( Types.BIT, "boolean" );
+		registerColumnType( Types.BIGINT, "fixed(19,0)" );
+		registerColumnType( Types.SMALLINT, "smallint" );
+		registerColumnType( Types.TINYINT, "fixed(3,0)" );
+		registerColumnType( Types.INTEGER, "int" );
+		registerColumnType( Types.CHAR, "char(1)" );
+		registerColumnType( Types.VARCHAR, "varchar($l)" );
+		registerColumnType( Types.FLOAT, "float" );
+		registerColumnType( Types.DOUBLE, "double precision" );
+		registerColumnType( Types.DATE, "date" );
+		registerColumnType( Types.TIME, "time" );
+		registerColumnType( Types.TIMESTAMP, "timestamp" );
+		registerColumnType( Types.VARBINARY, "long byte" );
+		registerColumnType( Types.NUMERIC, "fixed($p,$s)" );
+		registerColumnType( Types.CLOB, "long varchar" );
+		registerColumnType( Types.BLOB, "long byte" );
+
+		registerFunction( "abs", new StandardSQLFunction("abs") );
+		registerFunction( "sign", new StandardSQLFunction("sign", Hibernate.INTEGER) );
+
+		registerFunction( "exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) );
+		registerFunction( "ln", new StandardSQLFunction("ln", Hibernate.DOUBLE) );
+		registerFunction( "log", new StandardSQLFunction("ln", Hibernate.DOUBLE) );
+		registerFunction( "pi", new NoArgSQLFunction("pi", Hibernate.DOUBLE) );
+		registerFunction( "power", new StandardSQLFunction("power") );
+		registerFunction( "acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) );
+		registerFunction( "asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) );
+		registerFunction( "atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) );
+		registerFunction( "cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) );
+		registerFunction( "cosh", new StandardSQLFunction("cosh", Hibernate.DOUBLE) );
+		registerFunction( "cot", new StandardSQLFunction("cot", Hibernate.DOUBLE) );
+		registerFunction( "sin", new StandardSQLFunction("sin", Hibernate.DOUBLE) );
+		registerFunction( "sinh", new StandardSQLFunction("sinh", Hibernate.DOUBLE) );
+		registerFunction( "tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) );
+		registerFunction( "tanh", new StandardSQLFunction("tanh", Hibernate.DOUBLE) );
+		registerFunction( "radians", new StandardSQLFunction("radians", Hibernate.DOUBLE) );
+		registerFunction( "degrees", new StandardSQLFunction("degrees", Hibernate.DOUBLE) );
+		registerFunction( "atan2", new StandardSQLFunction("atan2", Hibernate.DOUBLE) );
+
+		registerFunction( "round", new StandardSQLFunction("round") );
+		registerFunction( "trunc", new StandardSQLFunction("trunc") );
+		registerFunction( "ceil", new StandardSQLFunction("ceil") );
+		registerFunction( "floor", new StandardSQLFunction("floor") );
+		registerFunction( "greatest", new StandardSQLFunction("greatest") );
+		registerFunction( "least", new StandardSQLFunction("least") );
+
+		registerFunction("time", new StandardSQLFunction("time", Hibernate.TIME) );
+		registerFunction("timestamp", new StandardSQLFunction("timestamp", Hibernate.TIMESTAMP) );
+		registerFunction("date", new StandardSQLFunction("date", Hibernate.DATE) );
+		registerFunction("microsecond", new StandardSQLFunction("microsecond", Hibernate.INTEGER) );
+
+		registerFunction("dayname", new StandardSQLFunction("dayname", Hibernate.STRING) );
+		registerFunction("monthname", new StandardSQLFunction("monthname", Hibernate.STRING) );
+		registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth", Hibernate.INTEGER) );
+		registerFunction("dayofweek", new
StandardSQLFunction("dayofweek", Hibernate.INTEGER) ); + registerFunction("dayofyear", new StandardSQLFunction("dayofyear", Hibernate.INTEGER) ); + registerFunction("weekofyear", new StandardSQLFunction("weekofyear", Hibernate.INTEGER) ); + + registerFunction( "replace", new StandardSQLFunction("replace", Hibernate.STRING) ); + registerFunction( "translate", new StandardSQLFunction("translate", Hibernate.STRING) ); + registerFunction( "lpad", new StandardSQLFunction("lpad", Hibernate.STRING) ); + registerFunction( "rpad", new StandardSQLFunction("rpad", Hibernate.STRING) ); + registerFunction( "substr", new StandardSQLFunction("substr", Hibernate.STRING) ); + registerFunction( "initcap", new StandardSQLFunction("initcap", Hibernate.STRING) ); + registerFunction( "lower", new StandardSQLFunction("lower", Hibernate.STRING) ); + registerFunction( "ltrim", new StandardSQLFunction("ltrim", Hibernate.STRING) ); + registerFunction( "rtrim", new StandardSQLFunction("rtrim", Hibernate.STRING) ); + registerFunction( "lfill", new StandardSQLFunction("ltrim", Hibernate.STRING) ); + registerFunction( "rfill", new StandardSQLFunction("rtrim", Hibernate.STRING) ); + registerFunction( "soundex", new StandardSQLFunction("soundex", Hibernate.STRING) ); + registerFunction( "upper", new StandardSQLFunction("upper", Hibernate.STRING) ); + registerFunction( "ascii", new StandardSQLFunction("ascii", Hibernate.STRING) ); + registerFunction( "index", new StandardSQLFunction("index", Hibernate.INTEGER) ); + + registerFunction( "value", new StandardSQLFunction( "value" ) ); + + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(", "||", ")" ) ); + registerFunction( "substring", new StandardSQLFunction( "substr", Hibernate.STRING ) ); + registerFunction( "locate", new StandardSQLFunction("index", Hibernate.INTEGER) ); + registerFunction( "coalesce", new StandardSQLFunction( "value" ) ); + + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE); + + } + + public boolean dropConstraints() { + return false; + } + + public String getAddColumnString() { + return "add"; + } + + public String getAddForeignKeyConstraintString( + String constraintName, + String[] foreignKey, + String referencedTable, + String[] primaryKey, boolean referencesPrimaryKey + ) { + StringBuffer res = new StringBuffer(30) + .append(" foreign key ") + .append(constraintName) + .append(" (") + .append( StringHelper.join(", ", foreignKey) ) + .append(") references ") + .append(referencedTable); + + if(!referencesPrimaryKey) { + res.append(" (") + .append( StringHelper.join(", ", primaryKey) ) + .append(')'); + } + + return res.toString(); + } + + public String getAddPrimaryKeyConstraintString(String constraintName) { + return " primary key "; + } + + public String getNullColumnString() { + return " null"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select " + getSelectSequenceNextValString( sequenceName ) + " from dual"; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return sequenceName + ".nextval"; + } + + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; + } + + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName; + } + + public String getQuerySequencesString() { + return "select sequence_name from domain.sequences"; + } + + public JoinFragment createOuterJoinFragment() { + return new OracleJoinFragment(); + } + + + public 
boolean supportsSequences() {
+		return true;
+	}
+
+	public CaseFragment createCaseFragment() {
+		return new DecodeCaseFragment();
+	}
+
+	public boolean supportsTemporaryTables() {
+		return true;
+	}
+
+	public String getCreateTemporaryTablePostfix() {
+		return "ignore rollback";
+	}
+
+	public String generateTemporaryTableName(String baseTableName) {
+		return "temp." + super.generateTemporaryTableName(baseTableName);
+	}
+
+}
diff --git a/src/org/hibernate/dialect/SQLServerDialect.java b/src/org/hibernate/dialect/SQLServerDialect.java
new file mode 100644
index 0000000000..0c61ae2c95
--- /dev/null
+++ b/src/org/hibernate/dialect/SQLServerDialect.java
@@ -0,0 +1,138 @@
+//$Id$
+package org.hibernate.dialect;
+
+import java.sql.Types;
+
+import org.hibernate.Hibernate;
+import org.hibernate.LockMode;
+import org.hibernate.dialect.function.SQLFunctionTemplate;
+import org.hibernate.dialect.function.StandardSQLFunction;
+import org.hibernate.dialect.function.AnsiTrimEmulationFunction;
+
+/**
+ * A dialect for Microsoft SQL Server 2000 and 2005
+ *
+ * @author Gavin King
+ */
+public class SQLServerDialect extends SybaseDialect {
+
+	public SQLServerDialect() {
+		registerColumnType( Types.VARBINARY, "image" );
+		registerColumnType( Types.VARBINARY, 8000, "varbinary($l)" );
+
+		registerFunction( "second", new SQLFunctionTemplate(Hibernate.INTEGER, "datepart(second, ?1)") );
+		registerFunction( "minute", new SQLFunctionTemplate(Hibernate.INTEGER, "datepart(minute, ?1)") );
+		registerFunction( "hour", new SQLFunctionTemplate(Hibernate.INTEGER, "datepart(hour, ?1)") );
+		registerFunction( "locate", new StandardSQLFunction("charindex", Hibernate.INTEGER) );
+
+		registerFunction( "extract", new SQLFunctionTemplate( Hibernate.INTEGER, "datepart(?1, ?3)" ) );
+		registerFunction( "mod", new SQLFunctionTemplate( Hibernate.INTEGER, "?1 % ?2" ) );
+		registerFunction( "bit_length", new SQLFunctionTemplate( Hibernate.INTEGER, "datalength(?1) * 8" ) );
+
+		registerFunction( "trim", new AnsiTrimEmulationFunction() );
+
+		registerKeyword("top");
+	}
+
+	public String getNoColumnsInsertString() {
+		return "default values";
+	}
+
+	static int getAfterSelectInsertPoint(String sql) {
+		int selectIndex = sql.toLowerCase().indexOf( "select" );
+		final int selectDistinctIndex = sql.toLowerCase().indexOf( "select distinct" );
+		return selectIndex + ( selectDistinctIndex == selectIndex ? 15 : 6 );
+	}
+
+	public String getLimitString(String querySelect, int offset, int limit) {
+		if ( offset > 0 ) {
+			throw new UnsupportedOperationException( "sql server has no offset" );
+		}
+		return new StringBuffer( querySelect.length()+8 )
+				.append(querySelect)
+				.insert( getAfterSelectInsertPoint(querySelect), " top " + limit )
+				.toString();
+	}
+
+	/**
+	 * Use insert table(...) values(...) select SCOPE_IDENTITY()
+	 */
+	public String appendIdentitySelectToInsert(String insertSQL) {
+		return insertSQL + " select scope_identity()";
+	}
+
+	public boolean supportsLimit() {
+		return true;
+	}
+
+	public boolean useMaxForLimit() {
+		return true;
+	}
+
+	public boolean supportsLimitOffset() {
+		return false;
+	}
+
+	public boolean supportsVariableLimit() {
+		return false;
+	}
+
+	public char closeQuote() {
+		return ']';
+	}
+
+	public char openQuote() {
+		return '[';
+	}
+
+	public String appendLockHint(LockMode mode, String tableName) {
+		if ( mode.greaterThan( LockMode.READ ) ) {
+			// does this need holdlock also? : return tableName + " with (updlock, rowlock, holdlock)";
+			return tableName + " with (updlock, rowlock)";
+		}
+		else {
+			return tableName;
+		}
+	}
+
+	public String getSelectGUIDString() {
+		return "select newid()";
+	}
+
+	// The current_timestamp is more accurate, but only known to be supported
+	// in SQL Server 7.0 and later (i.e., Sybase not known to support it at all)
+	public String getCurrentTimestampSelectString() {
+		return "select current_timestamp";
+	}
+
+
+	// Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+	public boolean areStringComparisonsCaseInsensitive() {
+		return true;
+	}
+
+	public boolean supportsResultSetPositionQueryMethodsOnForwardOnlyCursor() {
+		return false;
+	}
+
+	public boolean supportsCircularCascadeDeleteConstraints() {
+		// SQL Server (at least up through 2005) does not support defining
+		// cascade delete constraints which can circle back to the mutating
+		// table
+		return false;
+	}
+
+	public boolean supportsLobValueChangePropogation() {
+		// note: at least my local SQL Server 2005 Express shows this not working...
+		return false;
+	}
+
+	public boolean doesReadCommittedCauseWritersToBlockReaders() {
+		return false; // here we assume SQL Server 2005 using snapshot isolation, which does not have this problem
+	}
+
+	public boolean doesRepeatableReadCauseReadersToBlockWriters() {
+		return false; // here we assume SQL Server 2005 using snapshot isolation, which does not have this problem
+	}
+}
diff --git a/src/org/hibernate/dialect/Sybase11Dialect.java b/src/org/hibernate/dialect/Sybase11Dialect.java
new file mode 100644
index 0000000000..23c7eea56f
--- /dev/null
+++ b/src/org/hibernate/dialect/Sybase11Dialect.java
@@ -0,0 +1,20 @@
+//$Id$
+package org.hibernate.dialect;
+
+import org.hibernate.sql.JoinFragment;
+import org.hibernate.sql.Sybase11JoinFragment;
+
+/**
+ * A SQL dialect suitable for use with Sybase 11.9.2 (specifically: avoids ANSI JOIN syntax)
+ * @author Colm O' Flaherty
+ */
+public class Sybase11Dialect extends SybaseDialect {
+	public Sybase11Dialect() {
+		super();
+	}
+
+	public JoinFragment createOuterJoinFragment() {
+		return new Sybase11JoinFragment();
+	}
+
+}
diff --git a/src/org/hibernate/dialect/SybaseAnywhereDialect.java b/src/org/hibernate/dialect/SybaseAnywhereDialect.java
new file mode 100644
index 0000000000..1be63fccc8
--- /dev/null
+++ b/src/org/hibernate/dialect/SybaseAnywhereDialect.java
@@ -0,0 +1,32 @@
+package org.hibernate.dialect;
+
+/**
+ * SQL Dialect for Sybase Anywhere
+ * extending Sybase (Enterprise) Dialect
+ * (Tested on ASA 8.x)
+ * @author ?
+ */
+public class SybaseAnywhereDialect extends SybaseDialect {
+
+	/**
+	 * Sybase Anywhere syntax would require a "DEFAULT" for each column specified,
+	 * but I suppose Hibernate uses this syntax only with tables with just one column
+	 */
+	public String getNoColumnsInsertString() {
+		return "values (default)";
+	}
+
+
+	/**
+	 * ASA does not require constraints to be dropped before dropping tables, and the
+	 * DROP statement syntax used by Hibernate to drop constraints is not compatible
+	 * with ASA, so this is disabled
+	 */
+	public boolean dropConstraints() {
+		return false;
+	}
+
+	public boolean supportsInsertSelectIdentity() {
+		return false;
+	}
+
+}
\ No newline at end of file
diff --git a/src/org/hibernate/dialect/SybaseDialect.java b/src/org/hibernate/dialect/SybaseDialect.java
new file mode 100644
index 0000000000..2766cf6fe4
--- /dev/null
+++ b/src/org/hibernate/dialect/SybaseDialect.java
@@ -0,0 +1,238 @@
+//$Id$
+package org.hibernate.dialect;
+
+import java.sql.CallableStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Map;
+import java.util.Iterator;
+
+import org.hibernate.Hibernate;
+import org.hibernate.LockMode;
+import org.hibernate.cfg.Environment;
+import org.hibernate.dialect.function.CharIndexFunction;
+import org.hibernate.dialect.function.NoArgSQLFunction;
+import org.hibernate.dialect.function.SQLFunctionTemplate;
+import org.hibernate.dialect.function.StandardSQLFunction;
+import org.hibernate.dialect.function.VarArgsSQLFunction;
+
+/**
+ * An SQL dialect compatible with Sybase and MS SQL Server.
+ * @author Gavin King
+ */
+
+public class SybaseDialect extends Dialect {
+	public SybaseDialect() {
+		super();
+		registerColumnType( Types.BIT, "tinyint" ); //Sybase BIT type does not support null values
+		registerColumnType( Types.BIGINT, "numeric(19,0)" );
+		registerColumnType( Types.SMALLINT, "smallint" );
+		registerColumnType( Types.TINYINT, "tinyint" );
+		registerColumnType( Types.INTEGER, "int" );
+		registerColumnType( Types.CHAR, "char(1)" );
+		registerColumnType( Types.VARCHAR, "varchar($l)" );
+		registerColumnType( Types.FLOAT, "float" );
+		registerColumnType( Types.DOUBLE, "double precision" );
+		registerColumnType( Types.DATE, "datetime" );
+		registerColumnType( Types.TIME, "datetime" );
+		registerColumnType( Types.TIMESTAMP, "datetime" );
+		registerColumnType( Types.VARBINARY, "varbinary($l)" );
+		registerColumnType( Types.NUMERIC, "numeric($p,$s)" );
+		registerColumnType( Types.BLOB, "image" );
+		registerColumnType( Types.CLOB, "text" );
+
+		registerFunction( "ascii", new StandardSQLFunction("ascii", Hibernate.INTEGER) );
+		registerFunction( "char", new StandardSQLFunction("char", Hibernate.CHARACTER) );
+		registerFunction( "len", new StandardSQLFunction("len", Hibernate.LONG) );
+		registerFunction( "lower", new StandardSQLFunction("lower") );
+		registerFunction( "upper", new StandardSQLFunction("upper") );
+		registerFunction( "str", new StandardSQLFunction("str", Hibernate.STRING) );
+		registerFunction( "ltrim", new StandardSQLFunction("ltrim") );
+		registerFunction( "rtrim", new StandardSQLFunction("rtrim") );
+		registerFunction( "reverse", new StandardSQLFunction("reverse") );
+		registerFunction( "space", new StandardSQLFunction("space", Hibernate.STRING) );
+
+		registerFunction( "user", new NoArgSQLFunction("user", Hibernate.STRING) );
+
+		registerFunction( "current_timestamp", new NoArgSQLFunction("getdate", Hibernate.TIMESTAMP) );
+		registerFunction( "current_time", new NoArgSQLFunction("getdate",
Hibernate.TIME) ); + registerFunction( "current_date", new NoArgSQLFunction("getdate", Hibernate.DATE) ); + + registerFunction( "getdate", new NoArgSQLFunction("getdate", Hibernate.TIMESTAMP) ); + registerFunction( "getutcdate", new NoArgSQLFunction("getutcdate", Hibernate.TIMESTAMP) ); + registerFunction( "day", new StandardSQLFunction("day", Hibernate.INTEGER) ); + registerFunction( "month", new StandardSQLFunction("month", Hibernate.INTEGER) ); + registerFunction( "year", new StandardSQLFunction("year", Hibernate.INTEGER) ); + registerFunction( "datename", new StandardSQLFunction("datename", Hibernate.STRING) ); + + registerFunction( "abs", new StandardSQLFunction("abs") ); + registerFunction( "sign", new StandardSQLFunction("sign", Hibernate.INTEGER) ); + + registerFunction( "acos", new StandardSQLFunction("acos", Hibernate.DOUBLE) ); + registerFunction( "asin", new StandardSQLFunction("asin", Hibernate.DOUBLE) ); + registerFunction( "atan", new StandardSQLFunction("atan", Hibernate.DOUBLE) ); + registerFunction( "cos", new StandardSQLFunction("cos", Hibernate.DOUBLE) ); + registerFunction( "cot", new StandardSQLFunction("cot", Hibernate.DOUBLE) ); + registerFunction( "exp", new StandardSQLFunction("exp", Hibernate.DOUBLE) ); + registerFunction( "log", new StandardSQLFunction( "log", Hibernate.DOUBLE) ); + registerFunction( "log10", new StandardSQLFunction("log10", Hibernate.DOUBLE) ); + registerFunction( "sin", new StandardSQLFunction("sin", Hibernate.DOUBLE) ); + registerFunction( "sqrt", new StandardSQLFunction("sqrt", Hibernate.DOUBLE) ); + registerFunction( "tan", new StandardSQLFunction("tan", Hibernate.DOUBLE) ); + registerFunction( "pi", new NoArgSQLFunction("pi", Hibernate.DOUBLE) ); + registerFunction( "square", new StandardSQLFunction("square") ); + registerFunction( "rand", new StandardSQLFunction("rand", Hibernate.FLOAT) ); + + registerFunction("radians", new StandardSQLFunction("radians", Hibernate.DOUBLE) ); + registerFunction("degrees", new StandardSQLFunction("degrees", Hibernate.DOUBLE) ); + + registerFunction( "round", new StandardSQLFunction("round") ); + registerFunction( "ceiling", new StandardSQLFunction("ceiling") ); + registerFunction( "floor", new StandardSQLFunction("floor") ); + + registerFunction( "isnull", new StandardSQLFunction("isnull") ); + + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(","+",")" ) ); + + registerFunction( "length", new StandardSQLFunction( "len", Hibernate.INTEGER ) ); + registerFunction( "trim", new SQLFunctionTemplate( Hibernate.STRING, "ltrim(rtrim(?1))") ); + registerFunction( "locate", new CharIndexFunction() ); + + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, NO_BATCH); + } + + public String getAddColumnString() { + return "add"; + } + public String getNullColumnString() { + return " null"; + } + public boolean qualifyIndexName() { + return false; + } + + public String getForUpdateString() { + return ""; + } + + public boolean supportsIdentityColumns() { + return true; + } + public String getIdentitySelectString() { + return "select @@identity"; + } + public String getIdentityColumnString() { + return "identity not null"; //starts with 1, implicitly + } + + public boolean supportsInsertSelectIdentity() { + return true; + } + + public String appendIdentitySelectToInsert(String insertSQL) { + return insertSQL + "\nselect @@identity"; + } + + public String appendLockHint(LockMode mode, String tableName) { + if ( mode.greaterThan( LockMode.READ ) ) { + return tableName + " 
holdlock"; + } + else { + return tableName; + } + } + + public String applyLocksToSql(String sql, Map aliasedLockModes, Map keyColumnNames) { + Iterator itr = aliasedLockModes.entrySet().iterator(); + StringBuffer buffer = new StringBuffer( sql ); + int correction = 0; + while ( itr.hasNext() ) { + final Map.Entry entry = ( Map.Entry ) itr.next(); + final LockMode lockMode = ( LockMode ) entry.getValue(); + if ( lockMode.greaterThan( LockMode.READ ) ) { + final String alias = ( String ) entry.getKey(); + int start = -1, end = -1; + if ( sql.endsWith( " " + alias ) ) { + start = ( sql.length() - alias.length() ) + correction; + end = start + alias.length(); + } + else { + int position = sql.indexOf( " " + alias + " " ); + if ( position <= -1 ) { + position = sql.indexOf( " " + alias + "," ); + } + if ( position > -1 ) { + start = position + correction + 1; + end = start + alias.length(); + } + } + + if ( start > -1 ) { + final String lockHint = appendLockHint( lockMode, alias ); + buffer.replace( start, end, lockHint ); + correction += ( lockHint.length() - alias.length() ); + } + } + } + return buffer.toString(); + } + + public int registerResultSetOutParameter(CallableStatement statement, int col) throws SQLException { + return col; // sql server just returns automatically + } + + public ResultSet getResultSet(CallableStatement ps) throws SQLException { + boolean isResultSet = ps.execute(); +// This assumes you will want to ignore any update counts + while ( !isResultSet && ps.getUpdateCount() != -1 ) { + isResultSet = ps.getMoreResults(); + } +// You may still have other ResultSets or update counts left to process here +// but you can't do it now or the ResultSet you just got will be closed + return ps.getResultSet(); + } + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + public String getCurrentTimestampSelectString() { + return "select getdate()"; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String generateTemporaryTableName(String baseTableName) { + return "#" + baseTableName; + } + + public boolean dropTemporaryTableAfterUse() { + return true; // sql-server, at least needed this dropped after use; strange! + } + + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } + + public boolean supportsExistsInSelect() { + return false; + } + + public boolean doesReadCommittedCauseWritersToBlockReaders() { + return true; + } + + public boolean doesRepeatableReadCauseReadersToBlockWriters() { + return true; + } +} diff --git a/src/org/hibernate/dialect/TeradataDialect.java b/src/org/hibernate/dialect/TeradataDialect.java new file mode 100644 index 0000000000..55723c0030 --- /dev/null +++ b/src/org/hibernate/dialect/TeradataDialect.java @@ -0,0 +1,237 @@ +package org.hibernate.dialect; + +import java.sql.Types; + +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.function.SQLFunctionTemplate; +import org.hibernate.dialect.function.VarArgsSQLFunction; + +/** + * A dialect for the Teradata database created by MCR as part of the + * dialect certification process. 
+ * + * @author Jay Nance + */ +public class TeradataDialect extends Dialect { + + /** + * Constructor + */ + public TeradataDialect() { + super(); + //registerColumnType data types + registerColumnType( Types.NUMERIC, "NUMERIC($p,$s)" ); + registerColumnType( Types.DOUBLE, "DOUBLE PRECISION" ); + registerColumnType( Types.BIGINT, "NUMERIC(18,0)" ); + registerColumnType( Types.BIT, "BYTEINT" ); + registerColumnType( Types.TINYINT, "BYTEINT" ); + registerColumnType( Types.VARBINARY, "VARBYTE($l)" ); + registerColumnType( Types.BINARY, "BYTEINT" ); + registerColumnType( Types.LONGVARCHAR, "LONG VARCHAR" ); + registerColumnType( Types.CHAR, "CHAR(1)" ); + registerColumnType( Types.DECIMAL, "DECIMAL" ); + registerColumnType( Types.INTEGER, "INTEGER" ); + registerColumnType( Types.SMALLINT, "SMALLINT" ); + registerColumnType( Types.FLOAT, "FLOAT" ); + registerColumnType( Types.VARCHAR, "VARCHAR($l)" ); + registerColumnType( Types.DATE, "DATE" ); + registerColumnType( Types.TIME, "TIME" ); + registerColumnType( Types.TIMESTAMP, "TIMESTAMP" ); + registerColumnType( Types.BOOLEAN, "BYTEINT" ); // hibernate seems to ignore this type... + registerColumnType( Types.BLOB, "BLOB" ); + registerColumnType( Types.CLOB, "CLOB" ); + + registerFunction( "year", new SQLFunctionTemplate( Hibernate.INTEGER, "extract(year from ?1)" ) ); + registerFunction( "length", new SQLFunctionTemplate( Hibernate.INTEGER, "character_length(?1)" ) ); + registerFunction( "concat", new VarArgsSQLFunction( Hibernate.STRING, "(", "||", ")" ) ); + registerFunction( "substring", new SQLFunctionTemplate( Hibernate.STRING, "substring(?1 from ?2 for ?3)" ) ); + registerFunction( "locate", new SQLFunctionTemplate( Hibernate.STRING, "position(?1 in ?2)" ) ); + registerFunction( "mod", new SQLFunctionTemplate( Hibernate.STRING, "?1 mod ?2" ) ); + registerFunction( "str", new SQLFunctionTemplate( Hibernate.STRING, "cast(?1 as varchar(255))" ) ); + + // bit_length feels a bit broken to me. We have to cast to char in order to + // pass when a numeric value is supplied. But of course the answers given will + // be wildly different for these two datatypes. 1234.5678 will be 9 bytes as + // a char string but will be 8 or 16 bytes as a true numeric. + // Jay Nance 2006-09-22 + registerFunction( + "bit_length", new SQLFunctionTemplate( Hibernate.INTEGER, "octet_length(cast(?1 as char))*4" ) + ); + + // The preference here would be + // SQLFunctionTemplate( Hibernate.TIMESTAMP, "current_timestamp(?1)", false) + // but this appears not to work. 
+ // Jay Nance 2006-09-22 + registerFunction( "current_timestamp", new SQLFunctionTemplate( Hibernate.TIMESTAMP, "current_timestamp" ) ); + registerFunction( "current_time", new SQLFunctionTemplate( Hibernate.TIMESTAMP, "current_time" ) ); + registerFunction( "current_date", new SQLFunctionTemplate( Hibernate.TIMESTAMP, "current_date" ) ); + // IBID for current_time and current_date + + registerKeyword( "password" ); + registerKeyword( "type" ); + registerKeyword( "title" ); + registerKeyword( "year" ); + registerKeyword( "month" ); + registerKeyword( "summary" ); + registerKeyword( "alias" ); + registerKeyword( "value" ); + registerKeyword( "first" ); + registerKeyword( "role" ); + registerKeyword( "account" ); + registerKeyword( "class" ); + + // Tell hibernate to use getBytes instead of getBinaryStream + getDefaultProperties().setProperty( Environment.USE_STREAMS_FOR_BINARY, "false" ); + // No batch statements + getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, NO_BATCH ); + } + + /** + * Does this dialect support the FOR UPDATE syntax? + * + * @return empty string ... Teradata does not support FOR UPDATE syntax + */ + public String getForUpdateString() { + return ""; + } + + public boolean supportsIdentityColumns() { + return false; + } + + public boolean supportsSequences() { + return false; + } + + public String getAddColumnString() { + return "Add Column"; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String getCreateTemporaryTableString() { + return "create global temporary table"; + } + + public String getCreateTemporaryTablePostfix() { + return " on commit preserve rows"; + } + + public Boolean performTemporaryTableDDLInIsolation() { + return Boolean.TRUE; + } + + public boolean dropTemporaryTableAfterUse() { + return false; + } + + /** + * Get the name of the database type associated with the given + * java.sql.Types typecode. + * + * @param code java.sql.Types typecode + * @param length the length or precision of the column + * @param precision the precision of the column + * @param scale the scale of the column + * + * @return the database type name + * + * @throws HibernateException + */ + public String getTypeName(int code, int length, int precision, int scale) throws HibernateException { + /* + * We might want a special case for 19,2. This is very common for money types + * and here it is converted to 18,1 + */ + float f = precision > 0 ? ( float ) scale / ( float ) precision : 0; + int p = ( precision > 18 ? 18 : precision ); + int s = ( precision > 18 ? ( int ) ( 18.0 * f ) : ( scale > 18 ? 
18 : scale ) );
+
+		return super.getTypeName( code, length, p, s );
+	}
+
+	public boolean supportsCascadeDelete() {
+		return false;
+	}
+
+	public boolean supportsCircularCascadeDeleteConstraints() {
+		return false;
+	}
+
+	public boolean areStringComparisonsCaseInsensitive() {
+		return true;
+	}
+
+	public boolean supportsEmptyInList() {
+		return false;
+	}
+
+	public String getSelectClauseNullString(int sqlType) {
+		String v = "null";
+
+		switch ( sqlType ) {
+			case Types.BIT:
+			case Types.TINYINT:
+			case Types.SMALLINT:
+			case Types.INTEGER:
+			case Types.BIGINT:
+			case Types.FLOAT:
+			case Types.REAL:
+			case Types.DOUBLE:
+			case Types.NUMERIC:
+			case Types.DECIMAL:
+				v = "cast(null as decimal)";
+				break;
+			case Types.CHAR:
+			case Types.VARCHAR:
+			case Types.LONGVARCHAR:
+				v = "cast(null as varchar(255))";
+				break;
+			case Types.DATE:
+			case Types.TIME:
+			case Types.TIMESTAMP:
+				v = "cast(null as timestamp)";
+				break;
+			case Types.BINARY:
+			case Types.VARBINARY:
+			case Types.LONGVARBINARY:
+			case Types.NULL:
+			case Types.OTHER:
+			case Types.JAVA_OBJECT:
+			case Types.DISTINCT:
+			case Types.STRUCT:
+			case Types.ARRAY:
+			case Types.BLOB:
+			case Types.CLOB:
+			case Types.REF:
+			case Types.DATALINK:
+			case Types.BOOLEAN:
+				break;
+		}
+		return v;
+	}
+
+	public String getCreateMultisetTableString() {
+		return "create multiset table ";
+	}
+
+	public boolean supportsLobValueChangePropogation() {
+		return false;
+	}
+
+	public boolean doesReadCommittedCauseWritersToBlockReaders() {
+		return true;
+	}
+
+	public boolean doesRepeatableReadCauseReadersToBlockWriters() {
+		return true;
+	}
+
+	public boolean supportsBindAsCallableArgument() {
+		return false;
+	}
+}
\ No newline at end of file
diff --git a/src/org/hibernate/dialect/TimesTenDialect.java b/src/org/hibernate/dialect/TimesTenDialect.java
new file mode 100644
index 0000000000..38ea8c86dd
--- /dev/null
+++ b/src/org/hibernate/dialect/TimesTenDialect.java
@@ -0,0 +1,205 @@
+package org.hibernate.dialect;
+
+import java.sql.Types;
+
+import org.hibernate.Hibernate;
+import org.hibernate.LockMode;
+import org.hibernate.persister.entity.Lockable;
+import org.hibernate.cfg.Environment;
+import org.hibernate.dialect.function.NoArgSQLFunction;
+import org.hibernate.dialect.function.StandardSQLFunction;
+import org.hibernate.dialect.lock.LockingStrategy;
+import org.hibernate.dialect.lock.UpdateLockingStrategy;
+import org.hibernate.dialect.lock.SelectLockingStrategy;
+import org.hibernate.sql.JoinFragment;
+import org.hibernate.sql.OracleJoinFragment;
+
+/**
+ * A SQL dialect for TimesTen 5.1.
+ *
+ * Known limitations:
+ * No joined-subclass support because of no CASE support in TimesTen
+ * No support for subqueries that include aggregation
+ *  - size() in HQL not supported
+ *  - user queries that do subqueries with aggregation
+ * No CLOB/BLOB support
+ * No cascade delete support.
+ * No Calendar support
+ * No support for updating primary keys.
+ * + * @author Sherry Listgarten and Max Andersen + */ +public class TimesTenDialect extends Dialect { + + public TimesTenDialect() { + super(); + registerColumnType( Types.BIT, "TINYINT" ); + registerColumnType( Types.BIGINT, "BIGINT" ); + registerColumnType( Types.SMALLINT, "SMALLINT" ); + registerColumnType( Types.TINYINT, "TINYINT" ); + registerColumnType( Types.INTEGER, "INTEGER" ); + registerColumnType( Types.CHAR, "CHAR(1)" ); + registerColumnType( Types.VARCHAR, "VARCHAR($l)" ); + registerColumnType( Types.FLOAT, "FLOAT" ); + registerColumnType( Types.DOUBLE, "DOUBLE" ); + registerColumnType( Types.DATE, "DATE" ); + registerColumnType( Types.TIME, "TIME" ); + registerColumnType( Types.TIMESTAMP, "TIMESTAMP" ); + registerColumnType( Types.VARBINARY, "VARBINARY($l)" ); + registerColumnType( Types.NUMERIC, "DECIMAL($p, $s)" ); + // TimesTen has no BLOB/CLOB support, but these types may be suitable + // for some applications. The length is limited to 4 million bytes. + registerColumnType( Types.BLOB, "VARBINARY(4000000)" ); + registerColumnType( Types.CLOB, "VARCHAR(4000000)" ); + + getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true"); + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE); + registerFunction( "lower", new StandardSQLFunction("lower") ); + registerFunction( "upper", new StandardSQLFunction("upper") ); + registerFunction( "rtrim", new StandardSQLFunction("rtrim") ); + registerFunction( "concat", new StandardSQLFunction("concat", Hibernate.STRING) ); + registerFunction( "mod", new StandardSQLFunction("mod") ); + registerFunction( "to_char", new StandardSQLFunction("to_char",Hibernate.STRING) ); + registerFunction( "to_date", new StandardSQLFunction("to_date",Hibernate.TIMESTAMP) ); + registerFunction( "sysdate", new NoArgSQLFunction("sysdate", Hibernate.TIMESTAMP, false) ); + registerFunction( "getdate", new NoArgSQLFunction("getdate", Hibernate.TIMESTAMP, false) ); + registerFunction( "nvl", new StandardSQLFunction("nvl") ); + + } + + public boolean dropConstraints() { + return true; + } + + public boolean qualifyIndexName() { + return false; + } + + public boolean supportsUnique() { + return false; + } + + public boolean supportsUniqueConstraintInCreateAlterTable() { + return false; + } + + public String getAddColumnString() { + return "add"; + } + + public boolean supportsSequences() { + return true; + } + + public String getSelectSequenceNextValString(String sequenceName) { + return sequenceName + ".nextval"; + } + + public String getSequenceNextValString(String sequenceName) { + return "select first 1 " + sequenceName + ".nextval from sys.tables"; + } + + public String getCreateSequenceString(String sequenceName) { + return "create sequence " + sequenceName; + } + + public String getDropSequenceString(String sequenceName) { + return "drop sequence " + sequenceName; + } + + public String getQuerySequencesString() { + return "select NAME from sys.sequences"; + } + + public JoinFragment createOuterJoinFragment() { + return new OracleJoinFragment(); + } + + // new methods in dialect3 + /*public boolean supportsForUpdateNowait() { + return false; + }*/ + + public String getForUpdateString() { + return ""; + } + + public boolean supportsColumnCheck() { + return false; + } + + public boolean supportsTableCheck() { + return false; + } + + public boolean supportsLimitOffset() { + return false; + } + + public boolean supportsVariableLimit() { + return false; + } + + public boolean supportsLimit() { + return true; 
+ } + + public boolean useMaxForLimit() { + return true; + } + + public String getLimitString(String querySelect, int offset, int limit) { + if ( offset > 0 ) { + throw new UnsupportedOperationException( "TimesTen does not support offset" ); + } + return new StringBuffer( querySelect.length()+8 ) + .append(querySelect) + .insert( 6, " first " + limit ) + .toString(); + } + + public boolean supportsCurrentTimestampSelection() { + return true; + } + + public String getCurrentTimestampSelectString() { + return "select first 1 sysdate from sys.tables"; + } + + public boolean isCurrentTimestampSelectStringCallable() { + return false; + } + + public boolean supportsTemporaryTables() { + return true; + } + + public String generateTemporaryTableName(String baseTableName) { + String name = super.generateTemporaryTableName(baseTableName); + return name.length() > 30 ? name.substring( 1, 30 ) : name; + } + + public String getCreateTemporaryTableString() { + return "create global temporary table"; + } + + public String getCreateTemporaryTablePostfix() { + return "on commit delete rows"; + } + + public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { + // TimesTen has no known variation of a "SELECT ... FOR UPDATE" syntax... + if ( lockMode.greaterThan( LockMode.READ ) ) { + return new UpdateLockingStrategy( lockable, lockMode ); + } + else { + return new SelectLockingStrategy( lockable, lockMode ); + } + } + + // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean supportsEmptyInList() { + return false; + } +} diff --git a/src/org/hibernate/dialect/TypeNames.java b/src/org/hibernate/dialect/TypeNames.java new file mode 100644 index 0000000000..cfcc9ef067 --- /dev/null +++ b/src/org/hibernate/dialect/TypeNames.java @@ -0,0 +1,116 @@ +//$Id$ +package org.hibernate.dialect; + +import java.util.Map; +import java.util.HashMap; +import java.util.TreeMap; +import java.util.Iterator; + +import org.hibernate.MappingException; +import org.hibernate.util.StringHelper; + +/** + * This class maps a type to names. Associations + * may be marked with a capacity. Calling the get() + * method with a type and actual size n will return + * the associated name with smallest capacity >= n, + * if available and an unmarked default type otherwise. + * Eg, setting + *
    + *	names.put(type,        "TEXT" );
    + *	names.put(type,   255, "VARCHAR($l)" );
    + *	names.put(type, 65534, "LONGVARCHAR($l)" );
    + * 
    + * will give you back the following: + *
    + *  names.get(type)         // --> "TEXT" (default)
    + *  names.get(type,    100) // --> "VARCHAR(100)" (100 is in [0:255])
    + *  names.get(type,   1000) // --> "LONGVARCHAR(1000)" (1000 is in [256:65534])
    + *  names.get(type, 100000) // --> "TEXT" (default)
    + * 
    + * On the other hand, simply putting + *
    + *	names.put(type, "VARCHAR($l)" );
    + * 
    + * would result in + *
    + *  names.get(type)        // --> "VARCHAR($l)" (will cause trouble)
    + *  names.get(type, 100)   // --> "VARCHAR(100)"
    + *  names.get(type, 10000) // --> "VARCHAR(10000)"
    + * 
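+ * Lookups match the smallest registered capacity that is >= the requested
+ * size (entries are kept in a TreeMap ordered by capacity), falling back to
+ * the unmarked default when the size exceeds every registered capacity.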
    + * + * @author Christoph Beck + */ +public class TypeNames { + + private HashMap weighted = new HashMap(); + private HashMap defaults = new HashMap(); + + /** + * get default type name for specified type + * @param typecode the type key + * @return the default type name associated with specified key + */ + public String get(int typecode) throws MappingException { + String result = (String) defaults.get( new Integer(typecode) ); + if (result==null) throw new MappingException("No Dialect mapping for JDBC type: " + typecode); + return result; + } + + /** + * get type name for specified type and size + * @param typecode the type key + * @param size the SQL length + * @param scale the SQL scale + * @param precision the SQL precision + * @return the associated name with smallest capacity >= size, + * if available and the default type name otherwise + */ + public String get(int typecode, int size, int precision, int scale) throws MappingException { + Map map = (Map) weighted.get( new Integer(typecode) ); + if ( map!=null && map.size()>0 ) { + // iterate entries ordered by capacity to find first fit + Iterator entries = map.entrySet().iterator(); + while ( entries.hasNext() ) { + Map.Entry entry = (Map.Entry)entries.next(); + if ( size <= ( (Integer) entry.getKey() ).intValue() ) { + return replace( (String) entry.getValue(), size, precision, scale ); + } + } + } + return replace( get(typecode), size, precision, scale ); + } + + private static String replace(String type, int size, int precision, int scale) { + type = StringHelper.replaceOnce(type, "$s", Integer.toString(scale) ); + type = StringHelper.replaceOnce(type, "$l", Integer.toString(size) ); + return StringHelper.replaceOnce(type, "$p", Integer.toString(precision) ); + } + + /** + * set a type name for specified type key and capacity + * @param typecode the type key + */ + public void put(int typecode, int capacity, String value) { + TreeMap map = (TreeMap)weighted.get( new Integer(typecode) ); + if (map == null) {// add new ordered map + map = new TreeMap(); + weighted.put( new Integer(typecode), map ); + } + map.put(new Integer(capacity), value); + } + + /** + * set a default type name for specified type key + * @param typecode the type key + */ + public void put(int typecode, String value) { + defaults.put( new Integer(typecode), value ); + } +} + + + + + + diff --git a/src/org/hibernate/dialect/function/AnsiTrimEmulationFunction.java b/src/org/hibernate/dialect/function/AnsiTrimEmulationFunction.java new file mode 100644 index 0000000000..95de11ad0d --- /dev/null +++ b/src/org/hibernate/dialect/function/AnsiTrimEmulationFunction.java @@ -0,0 +1,146 @@ +package org.hibernate.dialect.function; + +import org.hibernate.Hibernate; +import org.hibernate.QueryException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +import java.util.List; +import java.util.ArrayList; + +/** + * A {@link SQLFunction} implementation that emulates the ANSI SQL trim function + * on dialects which do not support the full definition. However, this function + * definition does assume the availability of ltrim, rtrim, and replace functions + * which it uses in various combinations to emulate the desired ANSI trim() + * functionality. 
+ *
+ * @author Steve Ebersole
+ */
+public class AnsiTrimEmulationFunction implements SQLFunction {
+
+	private static final SQLFunction LEADING_SPACE_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "ltrim( ?1 )");
+	private static final SQLFunction TRAILING_SPACE_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "rtrim( ?1 )");
+	private static final SQLFunction BOTH_SPACE_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "ltrim( rtrim( ?1 ) )");
+	private static final SQLFunction BOTH_SPACE_TRIM_FROM = new SQLFunctionTemplate( Hibernate.STRING, "ltrim( rtrim( ?2 ) )");
+
+	private static final SQLFunction LEADING_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "replace( replace( rtrim( replace( replace( ?1, ' ', '${space}$' ), ?2, ' ' ) ), ' ', ?2 ), '${space}$', ' ' )" );
+	private static final SQLFunction TRAILING_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "replace( replace( ltrim( replace( replace( ?1, ' ', '${space}$' ), ?2, ' ' ) ), ' ', ?2 ), '${space}$', ' ' )" );
+	private static final SQLFunction BOTH_TRIM = new SQLFunctionTemplate( Hibernate.STRING, "replace( replace( ltrim( rtrim( replace( replace( ?1, ' ', '${space}$' ), ?2, ' ' ) ) ), ' ', ?2 ), '${space}$', ' ' )" );
+
+	public Type getReturnType(Type columnType, Mapping mapping) throws QueryException {
+		return Hibernate.STRING;
+	}
+
+	public boolean hasArguments() {
+		return true;
+	}
+
+	public boolean hasParenthesesIfNoArguments() {
+		return false;
+	}
+
+	public String render(List args, SessionFactoryImplementor factory) throws QueryException {
+		// according to both the ANSI-SQL and EJB3 specs, trim can either take
+		// exactly one parameter or a variable number of parameters between 1 and 4.
+		// from the SQL spec:
+		//
+		// <trim function> ::=
+		//      TRIM <left paren> <trim operands> <right paren>
+		//
+		// <trim operands> ::=
+		//      [ [ <trim specification> ] [ <trim character> ] FROM ] <trim source>
+		//
+		// <trim specification> ::=
+		//      LEADING
+		//      | TRAILING
+		//      | BOTH
+		//
+		// If the <trim specification> is omitted, BOTH is assumed;
+		// if the <trim character> is omitted, space is assumed
+		if ( args.size() == 1 ) {
+			// we have the form: trim(trimSource)
+			// so we trim leading and trailing spaces
+			return BOTH_SPACE_TRIM.render( args, factory );
+		}
+		else if ( "from".equalsIgnoreCase( ( String ) args.get( 0 ) ) ) {
+			// we have the form: trim(from trimSource).
+			// This is functionally equivalent to trim(trimSource)
+			return BOTH_SPACE_TRIM_FROM.render( args, factory );
+		}
+		else {
+			// otherwise, a trim-specification and/or a trim-character
+			// have been specified; we need to decide which options
+			// are present and "do the right thing"
+			boolean leading = true;         // should leading trim-characters be trimmed?
+			boolean trailing = true;        // should trailing trim-characters be trimmed?
+			String trimCharacter = null;    // the trim-character
+			String trimSource = null;       // the trim-source
+
+			// potentialTrimCharacterArgIndex = 1 assumes that a
+			// trim-specification has been specified.
+			// we handle the exception to that explicitly
+			int potentialTrimCharacterArgIndex = 1;
+			String firstArg = ( String ) args.get( 0 );
+			if ( "leading".equalsIgnoreCase( firstArg ) ) {
+				trailing = false;
+			}
+			else if ( "trailing".equalsIgnoreCase( firstArg ) ) {
+				leading = false;
+			}
+			else if ( "both".equalsIgnoreCase( firstArg ) ) {
+			}
+			else {
+				potentialTrimCharacterArgIndex = 0;
+			}
+
+			String potentialTrimCharacter = ( String ) args.get( potentialTrimCharacterArgIndex );
+			if ( "from".equalsIgnoreCase( potentialTrimCharacter ) ) {
+				trimCharacter = "' '";
+				trimSource = ( String ) args.get( potentialTrimCharacterArgIndex + 1 );
+			}
+			else if ( potentialTrimCharacterArgIndex + 1 >= args.size() ) {
+				trimCharacter = "' '";
+				trimSource = potentialTrimCharacter;
+			}
+			else {
+				trimCharacter = potentialTrimCharacter;
+				if ( "from".equalsIgnoreCase( ( String ) args.get( potentialTrimCharacterArgIndex + 1 ) ) ) {
+					trimSource = ( String ) args.get( potentialTrimCharacterArgIndex + 2 );
+				}
+				else {
+					trimSource = ( String ) args.get( potentialTrimCharacterArgIndex + 1 );
+				}
+			}
+
+			List argsToUse = new ArrayList();
+			argsToUse.add( trimSource );
+			argsToUse.add( trimCharacter );
+
+			if ( trimCharacter.equals( "' '" ) ) {
+				if ( leading && trailing ) {
+					return BOTH_SPACE_TRIM.render( argsToUse, factory );
+				}
+				else if ( leading ) {
+					return LEADING_SPACE_TRIM.render( argsToUse, factory );
+				}
+				else {
+					return TRAILING_SPACE_TRIM.render( argsToUse, factory );
+				}
+			}
+			else {
+				if ( leading && trailing ) {
+					return BOTH_TRIM.render( argsToUse, factory );
+				}
+				else if ( leading ) {
+					return LEADING_TRIM.render( argsToUse, factory );
+				}
+				else {
+					return TRAILING_TRIM.render( argsToUse, factory );
+				}
+			}
+		}
+	}
+}
diff --git a/src/org/hibernate/dialect/function/CastFunction.java b/src/org/hibernate/dialect/function/CastFunction.java
new file mode 100755
index 0000000000..8e08d9c423
--- /dev/null
+++ b/src/org/hibernate/dialect/function/CastFunction.java
@@ -0,0 +1,55 @@
+//$Id$
+package org.hibernate.dialect.function;
+
+import java.util.List;
+
+import org.hibernate.QueryException;
+import org.hibernate.engine.Mapping;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.type.Type;
+import org.hibernate.type.TypeFactory;
+
+/**
+ * ANSI-SQL style cast(foo as type) where the type is
+ * a Hibernate type
+ * @author Gavin King
+ */
+public class CastFunction implements SQLFunction {
+
+	public Type getReturnType(Type columnType, Mapping mapping) throws QueryException {
+		return columnType; //note there is a weird implementation in the client side
+	}
+
+	public boolean hasArguments() {
+		return true;
+	}
+
+	public boolean hasParenthesesIfNoArguments() {
+		return true;
+	}
+
+	public String render(List args, SessionFactoryImplementor factory) throws QueryException {
+		if ( args.size()!=2 ) {
+			throw new QueryException("cast() requires two arguments");
+		}
+		String type = (String) args.get(1);
+		int[] sqlTypeCodes = TypeFactory.heuristicType(type).sqlTypes(factory);
+		if ( sqlTypeCodes.length!=1 ) {
+			throw new QueryException("invalid Hibernate type for cast()");
+		}
+		String sqlType = factory.getDialect().getCastTypeName( sqlTypeCodes[0] );
+		if (sqlType==null) {
+			//TODO: never reached, since getTypeName() actually throws an exception!
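+			// Illustration with hypothetical arguments: for args ["amount", "integer"],
+			// render() produces something like "cast(amount as INTEGER)", using
+			// whatever type name the dialect has registered for the resolved JDBC code.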
+			sqlType = type;
+		}
+		/*else {
+			//trim off the length/precision/scale
+			int loc = sqlType.indexOf('(');
+			if (loc>-1) {
+				sqlType = sqlType.substring(0, loc);
+			}
+		}*/
+		return "cast(" + args.get(0) + " as " + sqlType + ')';
+	}
+
+}
diff --git a/src/org/hibernate/dialect/function/CharIndexFunction.java b/src/org/hibernate/dialect/function/CharIndexFunction.java
new file mode 100755
index 0000000000..9019f5cbbb
--- /dev/null
+++ b/src/org/hibernate/dialect/function/CharIndexFunction.java
@@ -0,0 +1,45 @@
+//$Id$
+package org.hibernate.dialect.function;
+
+import java.util.List;
+
+import org.hibernate.Hibernate;
+import org.hibernate.QueryException;
+import org.hibernate.engine.Mapping;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.type.Type;
+
+/**
+ * Emulation of locate() on Sybase
+ * @author Nathan Moon
+ */
+public class CharIndexFunction implements SQLFunction {
+
+	public Type getReturnType(Type columnType, Mapping mapping) throws QueryException {
+		return Hibernate.INTEGER;
+	}
+
+	public boolean hasArguments() {
+		return true;
+	}
+
+	public boolean hasParenthesesIfNoArguments() {
+		return true;
+	}
+
+	public String render(List args, SessionFactoryImplementor factory) throws QueryException {
+		boolean threeArgs = args.size() > 2;
+		Object pattern = args.get(0);
+		Object string = args.get(1);
+		Object start = threeArgs ? args.get(2) : null;
+
+		StringBuffer buf = new StringBuffer();
+		buf.append("charindex(").append( pattern ).append(", ");
+		if (threeArgs) buf.append( "right(");
+		buf.append( string );
+		if (threeArgs) buf.append( ", char_length(" ).append( string ).append(")-(").append( start ).append("-1))");
+		buf.append(')');
+		return buf.toString();
+	}
+
+}
diff --git a/src/org/hibernate/dialect/function/ClassicAvgFunction.java b/src/org/hibernate/dialect/function/ClassicAvgFunction.java
new file mode 100644
index 0000000000..9959e1b14b
--- /dev/null
+++ b/src/org/hibernate/dialect/function/ClassicAvgFunction.java
@@ -0,0 +1,42 @@
+/**
+ *
+ */
+package org.hibernate.dialect.function;
+
+import java.sql.Types;
+
+import org.hibernate.Hibernate;
+import org.hibernate.MappingException;
+import org.hibernate.QueryException;
+import org.hibernate.engine.Mapping;
+import org.hibernate.type.Type;
+
+/**
+ * Classic AVG SQL function that returns types as they were in Hibernate 3.1
+ *
+ * @author Max Rydahl Andersen
+ *
+ */
+public class ClassicAvgFunction extends StandardSQLFunction {
+	public ClassicAvgFunction() {
+		super( "avg" );
+	}
+
+	public Type getReturnType(Type columnType, Mapping mapping) throws QueryException {
+		int[] sqlTypes;
+		try {
+			sqlTypes = columnType.sqlTypes( mapping );
+		}
+		catch ( MappingException me ) {
+			throw new QueryException( me );
+		}
+		if ( sqlTypes.length != 1 ) throw new QueryException( "multi-column type in avg()" );
+		int sqlType = sqlTypes[0];
+		if ( sqlType == Types.INTEGER || sqlType == Types.BIGINT || sqlType == Types.TINYINT ) {
+			return Hibernate.FLOAT;
+		}
+		else {
+			return columnType;
+		}
+	}
+}
\ No newline at end of file
diff --git a/src/org/hibernate/dialect/function/ClassicCountFunction.java b/src/org/hibernate/dialect/function/ClassicCountFunction.java
new file mode 100644
index 0000000000..e68cc78ba2
--- /dev/null
+++ b/src/org/hibernate/dialect/function/ClassicCountFunction.java
@@ -0,0 +1,25 @@
+/**
+ *
+ */
+package org.hibernate.dialect.function;
+
+import org.hibernate.Hibernate;
+import org.hibernate.engine.Mapping;
+import org.hibernate.type.Type;
+
+
+/**
+ * Classic COUNT SQL function that returns types as they were in Hibernate 3.1
+ *
+ * @author Max Rydahl Andersen
+ *
+ */
+public class ClassicCountFunction extends StandardSQLFunction {
+	public ClassicCountFunction() {
+		super( "count" );
+	}
+
+	public Type getReturnType(Type columnType, Mapping mapping) {
+		return Hibernate.INTEGER;
+	}
+}
\ No newline at end of file
diff --git a/src/org/hibernate/dialect/function/ClassicSumFunction.java b/src/org/hibernate/dialect/function/ClassicSumFunction.java
new file mode 100644
index 0000000000..53833079f5
--- /dev/null
+++ b/src/org/hibernate/dialect/function/ClassicSumFunction.java
@@ -0,0 +1,17 @@
+/**
+ *
+ */
+package org.hibernate.dialect.function;
+
+
+/**
+ * Classic SUM SQL function that returns types as they were in Hibernate 3.1
+ *
+ * @author Max Rydahl Andersen
+ *
+ */
+public class ClassicSumFunction extends StandardSQLFunction {
+	public ClassicSumFunction() {
+		super( "sum" );
+	}
+}
\ No newline at end of file
diff --git a/src/org/hibernate/dialect/function/ConditionalParenthesisFunction.java b/src/org/hibernate/dialect/function/ConditionalParenthesisFunction.java
new file mode 100644
index 0000000000..41c9907844
--- /dev/null
+++ b/src/org/hibernate/dialect/function/ConditionalParenthesisFunction.java
@@ -0,0 +1,45 @@
+//$Id: ConditionalParenthesisFunction.java,v 1.4 2005/04/26 18:08:01 oneovthafew Exp $
+package org.hibernate.dialect.function;
+
+import java.util.List;
+
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.type.Type;
+
+/**
+ * Essentially the same as {@link org.hibernate.dialect.function.StandardSQLFunction},
+ * except that here the parentheses are not included when no arguments are given.
+ *
+ * @author Jonathan Levinson
+ */
+public class ConditionalParenthesisFunction extends StandardSQLFunction {
+
+	public ConditionalParenthesisFunction(String name) {
+		super( name );
+	}
+
+	public ConditionalParenthesisFunction(String name, Type type) {
+		super( name, type );
+	}
+
+	public boolean hasParenthesesIfNoArguments() {
+		return false;
+	}
+
+	public String render(List args, SessionFactoryImplementor factory) {
+		final boolean hasArgs = !args.isEmpty();
+		StringBuffer buf = new StringBuffer();
+		buf.append( getName() );
+		if ( hasArgs ) {
+			buf.append( "(" );
+			for ( int i = 0; i < args.size(); i++ ) {
+				buf.append( args.get( i ) );
+				if ( i < args.size() - 1 ) {
+					buf.append( ", " );
+				}
+			}
+			buf.append( ")" );
+		}
+		return buf.toString();
+	}
+}
diff --git a/src/org/hibernate/dialect/function/ConvertFunction.java b/src/org/hibernate/dialect/function/ConvertFunction.java
new file mode 100644
index 0000000000..6a12132117
--- /dev/null
+++ b/src/org/hibernate/dialect/function/ConvertFunction.java
@@ -0,0 +1,45 @@
+//$Id: CastFunction.java 7368 2005-07-04 02:54:27Z oneovthafew $
+package org.hibernate.dialect.function;
+
+import java.util.List;
+
+import org.hibernate.QueryException;
+import org.hibernate.Hibernate;
+import org.hibernate.engine.Mapping;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.type.Type;
+
+/**
+ * A Caché definition of a convert function.
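+ * For example (an illustrative reading of render() below): given args of
+ * (value, SQL_VARCHAR), the two-argument form produces the JDBC escape
+ * {fn convert(value , SQL_VARCHAR)}, while the three-argument form produces
+ * a plain convert( value , type , style ) call.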
+ * + * @author Jonathan Levinson + */ +public class ConvertFunction implements SQLFunction { + + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + return Hibernate.STRING; + } + + public boolean hasArguments() { + return true; + } + + public boolean hasParenthesesIfNoArguments() { + return true; + } + + public String render(List args, SessionFactoryImplementor factory) throws QueryException { + if ( args.size() != 2 && args.size() != 3 ) { + throw new QueryException( "convert() requires two or three arguments" ); + } + String type = ( String ) args.get( 1 ); + + if ( args.size() == 2 ) { + return "{fn convert(" + args.get( 0 ) + " , " + type + ")}"; + } + else { + return "convert(" + args.get( 0 ) + " , " + type + "," + args.get( 2 ) + ")"; + } + } + +} diff --git a/src/org/hibernate/dialect/function/NoArgSQLFunction.java b/src/org/hibernate/dialect/function/NoArgSQLFunction.java new file mode 100644 index 0000000000..e1984de636 --- /dev/null +++ b/src/org/hibernate/dialect/function/NoArgSQLFunction.java @@ -0,0 +1,49 @@ +//$Id$ +package org.hibernate.dialect.function; + +import java.util.List; + +import org.hibernate.QueryException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + + +/** + * A function which takes no arguments + * @author Michi + */ +public class NoArgSQLFunction implements SQLFunction { + private Type returnType; + private boolean hasParenthesesIfNoArguments; + private String name; + + public NoArgSQLFunction(String name, Type returnType) { + this(name, returnType, true); + } + + public NoArgSQLFunction(String name, Type returnType, boolean hasParenthesesIfNoArguments) { + this.returnType = returnType; + this.hasParenthesesIfNoArguments = hasParenthesesIfNoArguments; + this.name = name; + } + + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + return returnType; + } + + public boolean hasArguments() { + return false; + } + + public boolean hasParenthesesIfNoArguments() { + return hasParenthesesIfNoArguments; + } + + public String render(List args, SessionFactoryImplementor factory) throws QueryException { + if ( args.size()>0 ) { + throw new QueryException("function takes no arguments: " + name); + } + return hasParenthesesIfNoArguments ? 
name + "()" : name; + } +} diff --git a/src/org/hibernate/dialect/function/NvlFunction.java b/src/org/hibernate/dialect/function/NvlFunction.java new file mode 100755 index 0000000000..7349e1c2aa --- /dev/null +++ b/src/org/hibernate/dialect/function/NvlFunction.java @@ -0,0 +1,42 @@ +//$Id$ +package org.hibernate.dialect.function; + +import java.util.List; + +import org.hibernate.QueryException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Emulation of coalesce() on Oracle, using multiple + * nvl() calls + * @author Gavin King + */ +public class NvlFunction implements SQLFunction { + + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + return columnType; + } + + public boolean hasArguments() { + return true; + } + + public boolean hasParenthesesIfNoArguments() { + return true; + } + + public String render(List args, SessionFactoryImplementor factory) throws QueryException { + int lastIndex = args.size()-1; + Object last = args.remove(lastIndex); + if ( lastIndex==0 ) return last.toString(); + Object secondLast = args.get(lastIndex-1); + String nvl = "nvl(" + secondLast + ", " + last + ")"; + args.set(lastIndex-1, nvl); + return render(args, factory); + } + + + +} diff --git a/src/org/hibernate/dialect/function/PositionSubstringFunction.java b/src/org/hibernate/dialect/function/PositionSubstringFunction.java new file mode 100755 index 0000000000..4edf8bcf73 --- /dev/null +++ b/src/org/hibernate/dialect/function/PositionSubstringFunction.java @@ -0,0 +1,48 @@ +//$Id$ +package org.hibernate.dialect.function; + +import java.util.List; + +import org.hibernate.Hibernate; +import org.hibernate.QueryException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Emulation of locate() on PostgreSQL + * @author Gavin King + */ +public class PositionSubstringFunction implements SQLFunction { + + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + return Hibernate.INTEGER; + } + + public boolean hasArguments() { + return true; + } + + public boolean hasParenthesesIfNoArguments() { + return true; + } + + public String render(List args, SessionFactoryImplementor factory) throws QueryException { + boolean threeArgs = args.size() > 2; + Object pattern = args.get(0); + Object string = args.get(1); + Object start = threeArgs ? 
args.get(2) : null; + + StringBuffer buf = new StringBuffer(); + if (threeArgs) buf.append('('); + buf.append("position(").append( pattern ).append(" in "); + if (threeArgs) buf.append( "substring("); + buf.append( string ); + if (threeArgs) buf.append( ", " ).append( start ).append(')'); + buf.append(')'); + if (threeArgs) buf.append('+').append( start ).append("-1)"); + return buf.toString(); + } + + +} diff --git a/src/org/hibernate/dialect/function/SQLFunction.java b/src/org/hibernate/dialect/function/SQLFunction.java new file mode 100644 index 0000000000..8259c26741 --- /dev/null +++ b/src/org/hibernate/dialect/function/SQLFunction.java @@ -0,0 +1,58 @@ +//$Id$ +package org.hibernate.dialect.function; + +import java.util.List; + +import org.hibernate.QueryException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Provides support routines for the HQL functions as used + * in the various SQL Dialects + * + * Provides an interface for supporting various HQL functions that are + * translated to SQL. The Dialect and its sub-classes use this interface to + * provide details required for processing of the function. + * + * @author David Channon + */ +public interface SQLFunction { + /** + * The return type of the function. May be either a concrete type which + * is preset, or variable depending upon the type of the first function + * argument. + * + * @param columnType the type of the first argument + * @param mapping The mapping source. + * @return The type to be expected as a return. + * @throws org.hibernate.QueryException Indicates an issue resolving the return type. + */ + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException; + + /** + * Does this function have any arguments? + * + * @return True if the function expects to have parameters; false otherwise. + */ + public boolean hasArguments(); + + /** + * If there are no arguments, are parens required? + * + * @return True if a no-arg call of this function requires parentheses. + */ + public boolean hasParenthesesIfNoArguments(); + + /** + * Render the function call as SQL fragment. + * + * @param args The function arguments + * @param factory The SessionFactory + * @return The rendered function call + * @throws org.hibernate.QueryException Indicates a problem rendering the + * function call. + */ + public String render(List args, SessionFactoryImplementor factory) throws QueryException; +} diff --git a/src/org/hibernate/dialect/function/SQLFunctionRegistry.java b/src/org/hibernate/dialect/function/SQLFunctionRegistry.java new file mode 100644 index 0000000000..227cce395a --- /dev/null +++ b/src/org/hibernate/dialect/function/SQLFunctionRegistry.java @@ -0,0 +1,33 @@ +package org.hibernate.dialect.function; + +import java.util.HashMap; +import java.util.Map; + +import org.hibernate.dialect.Dialect; + +public class SQLFunctionRegistry { + + private final Dialect dialect; + private final Map userFunctions; + + public SQLFunctionRegistry(Dialect dialect, Map userFunctions) { + this.dialect = dialect; + this.userFunctions = new HashMap(); + this.userFunctions.putAll( userFunctions ); + } + + public SQLFunction findSQLFunction(String functionName) { + String name = functionName.toLowerCase(); + SQLFunction userFunction = (SQLFunction) userFunctions.get( name ); + + return userFunction!=null?userFunction:(SQLFunction) dialect.getFunctions().get(name); // TODO: lowercasing done here. 
Was done "at random" before; maybe not needed at all ? + } + + public boolean hasFunction(String functionName) { + String name = functionName.toLowerCase(); + boolean hasUserFunction = userFunctions.containsKey ( name ); + + return hasUserFunction || dialect.getFunctions().containsKey ( name ); // TODO: toLowerCase was not done before. Only used in Template. + } + +} diff --git a/src/org/hibernate/dialect/function/SQLFunctionTemplate.java b/src/org/hibernate/dialect/function/SQLFunctionTemplate.java new file mode 100755 index 0000000000..c221aecdef --- /dev/null +++ b/src/org/hibernate/dialect/function/SQLFunctionTemplate.java @@ -0,0 +1,123 @@ +//$Id$ +package org.hibernate.dialect.function; + +import org.hibernate.QueryException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +import java.util.ArrayList; +import java.util.List; + +/** + * Represents HQL functions that can have different representations in different SQL dialects. + * E.g. in HQL we can define function concat(?1, ?2) to concatenate two strings + * p1 and p2. Target SQL function will be dialect-specific, e.g. (?1 || ?2) for + * Oracle, concat(?1, ?2) for MySql, (?1 + ?2) for MS SQL. + * Each dialect will define a template as a string (exactly like above) marking function + * parameters with '?' followed by parameter's index (first index is 1). + * + * @author Alexey Loubyansky + * @version $Revision$ + */ +public class SQLFunctionTemplate implements SQLFunction { + private final Type type; + private final boolean hasArguments; + private final boolean hasParenthesesIfNoArgs; + + private final String template; + private final String[] chunks; + private final int[] paramIndexes; + + public SQLFunctionTemplate(Type type, String template) { + this( type, template, true ); + } + + public SQLFunctionTemplate(Type type, String template, boolean hasParenthesesIfNoArgs) { + this.type = type; + this.template = template; + + List chunkList = new ArrayList(); + List paramList = new ArrayList(); + StringBuffer chunk = new StringBuffer( 10 ); + StringBuffer index = new StringBuffer( 2 ); + + for ( int i = 0; i < template.length(); ++i ) { + char c = template.charAt( i ); + if ( c == '?' ) { + chunkList.add( chunk.toString() ); + chunk.delete( 0, chunk.length() ); + + while ( ++i < template.length() ) { + c = template.charAt( i ); + if ( Character.isDigit( c ) ) { + index.append( c ); + } + else { + chunk.append( c ); + break; + } + } + + paramList.add( new Integer( Integer.parseInt( index.toString() ) - 1 ) ); + index.delete( 0, index.length() ); + } + else { + chunk.append( c ); + } + } + + if ( chunk.length() > 0 ) { + chunkList.add( chunk.toString() ); + } + + chunks = ( String[] ) chunkList.toArray( new String[chunkList.size()] ); + paramIndexes = new int[paramList.size()]; + for ( int i = 0; i < paramIndexes.length; ++i ) { + paramIndexes[i] = ( ( Integer ) paramList.get( i ) ).intValue(); + } + + hasArguments = paramIndexes.length > 0; + this.hasParenthesesIfNoArgs = hasParenthesesIfNoArgs; + } + + /** + * Applies the template to passed in arguments. + * @param args function arguments + * + * @return generated SQL function call + */ + public String render(List args, SessionFactoryImplementor factory) { + StringBuffer buf = new StringBuffer(); + for ( int i = 0; i < chunks.length; ++i ) { + if ( i < paramIndexes.length ) { + Object arg = paramIndexes[i] < args.size() ? 
args.get( paramIndexes[i] ) : null; + if ( arg != null ) { + buf.append( chunks[i] ).append( arg ); + } + } + else { + buf.append( chunks[i] ); + } + } + return buf.toString(); + } + + // SQLFunction implementation + + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + return type; + } + + public boolean hasArguments() { + return hasArguments; + } + + public boolean hasParenthesesIfNoArguments() { + return hasParenthesesIfNoArgs; + } + + public String toString() { + return template; + } +} diff --git a/src/org/hibernate/dialect/function/StandardJDBCEscapeFunction.java b/src/org/hibernate/dialect/function/StandardJDBCEscapeFunction.java new file mode 100644 index 0000000000..bf19a8e5b3 --- /dev/null +++ b/src/org/hibernate/dialect/function/StandardJDBCEscapeFunction.java @@ -0,0 +1,31 @@ +package org.hibernate.dialect.function; + +import java.util.List; + +import org.hibernate.type.Type; +import org.hibernate.engine.SessionFactoryImplementor; + +/** + * Analogous to {@link org.hibernate.dialect.function.StandardSQLFunction} + * except that standard JDBC escape sequences (i.e. {fn blah}) are used when + * rendering the SQL. + * + * @author Steve Ebersole + */ +public class StandardJDBCEscapeFunction extends StandardSQLFunction { + public StandardJDBCEscapeFunction(String name) { + super( name ); + } + + public StandardJDBCEscapeFunction(String name, Type typeValue) { + super( name, typeValue ); + } + + public String render(List args, SessionFactoryImplementor factory) { + return "{fn " + super.render( args, factory ) + "}"; + } + + public String toString() { + return "{fn " + getName() + "...}"; + } +} diff --git a/src/org/hibernate/dialect/function/StandardSQLFunction.java b/src/org/hibernate/dialect/function/StandardSQLFunction.java new file mode 100644 index 0000000000..b9191f82cb --- /dev/null +++ b/src/org/hibernate/dialect/function/StandardSQLFunction.java @@ -0,0 +1,107 @@ +//$Id$ +package org.hibernate.dialect.function; + +import java.util.List; + +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Provides a standard implementation that supports the majority of the HQL + * functions that are translated to SQL. The Dialect and its sub-classes use + * this class to provide details required for processing of the associated + * function. + * + * @author David Channon + */ +public class StandardSQLFunction implements SQLFunction { + private final String name; + private final Type type; + + /** + * Construct a standard SQL function definition with a variable return type; + * the actual return type will depend on the types to which the function + * is applied. + *
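+ * For example, new StandardSQLFunction( "upper" ) renders upper( arg ) and
+ * reports the argument's own type as its return type (an illustrative
+ * sketch based on render() and getReturnType() below).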
    + * Using this form, the return type is considered non-static and assumed + * to be the type of the first argument. + * + * @param name The name of the function. + */ + public StandardSQLFunction(String name) { + this( name, null ); + } + + /** + * Construct a standard SQL function definition with a static return type. + * + * @param name The name of the function. + * @param type The static return type. + */ + public StandardSQLFunction(String name, Type type) { + this.name = name; + this.type = type; + } + + /** + * Function name accessor + * + * @return The function name. + */ + public String getName() { + return name; + } + + /** + * Function static return type accessor. + * + * @return The static function return type; or null if return type is + * not static. + */ + public Type getType() { + return type; + } + + /** + * {@inheritDoc} + */ + public Type getReturnType(Type columnType, Mapping mapping) { + // return the concrete type, or the underlying type if a concrete type + // was not specified + return type == null ? columnType : type; + } + + /** + * {@inheritDoc} + */ + public boolean hasArguments() { + return true; + } + + /** + * {@inheritDoc} + */ + public boolean hasParenthesesIfNoArguments() { + return true; + } + + /** + * {@inheritDoc} + */ + public String render(List args, SessionFactoryImplementor factory) { + StringBuffer buf = new StringBuffer(); + buf.append( name ).append( '(' ); + for ( int i = 0; i < args.size(); i++ ) { + buf.append( args.get( i ) ); + if ( i < args.size() - 1 ) { + buf.append( ", " ); + } + } + return buf.append( ')' ).toString(); + } + + public String toString() { + return name; + } +} diff --git a/src/org/hibernate/dialect/function/VarArgsSQLFunction.java b/src/org/hibernate/dialect/function/VarArgsSQLFunction.java new file mode 100755 index 0000000000..a1b69aee53 --- /dev/null +++ b/src/org/hibernate/dialect/function/VarArgsSQLFunction.java @@ -0,0 +1,58 @@ +//$Id$ +package org.hibernate.dialect.function; + +import java.util.List; + +import org.hibernate.QueryException; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Support for slightly more general templating than StandardSQLFunction, + * with an unlimited number of arguments. + * @author Gavin King + */ +public class VarArgsSQLFunction implements SQLFunction { + + private final String begin; + private final String sep; + private final String end; + private final Type type; + + public VarArgsSQLFunction(Type type, String begin, String sep, String end) { + this.begin = begin; + this.sep = sep; + this.end = end; + this.type = type; + } + + public VarArgsSQLFunction(String begin, String sep, String end) { + this.begin = begin; + this.sep = sep; + this.end = end; + this.type = null; + } + + public Type getReturnType(Type columnType, Mapping mapping) throws QueryException { + return type==null ? columnType : type; + } + + public boolean hasArguments() { + return true; + } + + public boolean hasParenthesesIfNoArguments() { + return true; + } + + public String render(List args, SessionFactoryImplementor factory) throws QueryException { + StringBuffer buf = new StringBuffer().append(begin); + for ( int i=0; i + + +
    + A framework for defining database-specific SQL functions + that are available via the dialect. +
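+
+    For example, a custom Dialect subclass might expose one of these
+    implementations under an HQL name (an illustrative sketch; it assumes
+    the protected registerFunction hook on Dialect):
+
+        registerFunction( "trim", new AnsiTrimEmulationFunction() );
+        registerFunction( "locate", new CharIndexFunction() );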
    + + diff --git a/src/org/hibernate/dialect/lock/LockingStrategy.java b/src/org/hibernate/dialect/lock/LockingStrategy.java new file mode 100644 index 0000000000..1477a5bcd2 --- /dev/null +++ b/src/org/hibernate/dialect/lock/LockingStrategy.java @@ -0,0 +1,37 @@ +package org.hibernate.dialect.lock; + +import org.hibernate.engine.SessionImplementor; +import org.hibernate.StaleObjectStateException; +import org.hibernate.JDBCException; + +import java.io.Serializable; + +/** + * A strategy abstraction for how locks are obtained in the underlying database. + *
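+ * An illustrative sketch of the intended flow, using lock()'s declared
+ * signature (the accessor is the Dialect method named in the @see below;
+ * its exact parameter list is an assumption here):
+ *     dialect.getLockingStrategy( lockable, lockMode ).lock( id, version, object, session );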
+ * All of the provided locking implementations assume the underlying database
+ * supports (and that the connection is in) at least read-committed transaction
+ * isolation. The most glaring exclusion to this is HSQLDB, which only offers
+ * support for READ_UNCOMMITTED isolation.
+ *
+ * @see org.hibernate.dialect.Dialect#getLockingStrategy
+ * @since 3.2
+ *
+ * @author Steve Ebersole
+ */
+public interface LockingStrategy {
+	/**
+	 * Acquire an appropriate type of lock on the underlying data that will
+	 * endure until the end of the current transaction.
+	 *
+	 * @param id The id of the row to be locked
+	 * @param version The current version (or null if not versioned)
+	 * @param object The object logically being locked (currently not used)
+	 * @param session The session from which the lock request originated
+	 * @throws StaleObjectStateException Indicates an optimistic lock failure
+	 * as part of acquiring the requested database lock.
+	 * @throws JDBCException
+	 */
+	public void lock(Serializable id, Object version, Object object, SessionImplementor session)
+	throws StaleObjectStateException, JDBCException;
+}
diff --git a/src/org/hibernate/dialect/lock/SelectLockingStrategy.java b/src/org/hibernate/dialect/lock/SelectLockingStrategy.java
new file mode 100644
index 0000000000..67c2ba8a2b
--- /dev/null
+++ b/src/org/hibernate/dialect/lock/SelectLockingStrategy.java
@@ -0,0 +1,119 @@
+package org.hibernate.dialect.lock;
+
+import org.hibernate.persister.entity.Lockable;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.StaleObjectStateException;
+import org.hibernate.JDBCException;
+import org.hibernate.LockMode;
+import org.hibernate.sql.SimpleSelect;
+import org.hibernate.pretty.MessageHelper;
+import org.hibernate.exception.JDBCExceptionHelper;
+
+import java.io.Serializable;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+/**
+ * A locking strategy where the locks are obtained through select statements.
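+ * For a versioned entity this generates, roughly (a sketch of the SQL shape
+ * implied by generateLockString() below; the exact text is dialect-specific):
+ *     select id from table_name where id = ? and version = ? for update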
    + * For non-read locks, this is achieved through the Dialect's specific + * SELECT ... FOR UPDATE syntax. + * + * @see org.hibernate.dialect.Dialect#getForUpdateString(org.hibernate.LockMode) + * @see org.hibernate.dialect.Dialect#appendLockHint(org.hibernate.LockMode, String) + * @since 3.2 + * + * @author Steve Ebersole + */ +public class SelectLockingStrategy implements LockingStrategy { + + private final Lockable lockable; + private final LockMode lockMode; + private final String sql; + + /** + * Construct a locking strategy based on SQL SELECT statements. + * + * @param lockable The metadata for the entity to be locked. + * @param lockMode Indictates the type of lock to be acquired. + */ + public SelectLockingStrategy(Lockable lockable, LockMode lockMode) { + this.lockable = lockable; + this.lockMode = lockMode; + this.sql = generateLockString(); + } + + /** + * @see LockingStrategy#lock + */ + public void lock( + Serializable id, + Object version, + Object object, + SessionImplementor session) throws StaleObjectStateException, JDBCException { + + SessionFactoryImplementor factory = session.getFactory(); + try { + PreparedStatement st = session.getBatcher().prepareSelectStatement( sql ); + try { + lockable.getIdentifierType().nullSafeSet( st, id, 1, session ); + if ( lockable.isVersioned() ) { + lockable.getVersionType().nullSafeSet( + st, + version, + lockable.getIdentifierType().getColumnSpan( factory ) + 1, + session + ); + } + + ResultSet rs = st.executeQuery(); + try { + if ( !rs.next() ) { + if ( factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor() + .optimisticFailure( lockable.getEntityName() ); + } + throw new StaleObjectStateException( lockable.getEntityName(), id ); + } + } + finally { + rs.close(); + } + } + finally { + session.getBatcher().closeStatement( st ); + } + + } + catch ( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not lock: " + MessageHelper.infoString( lockable, id, session.getFactory() ), + sql + ); + } + } + + protected LockMode getLockMode() { + return lockMode; + } + + protected String generateLockString() { + SessionFactoryImplementor factory = lockable.getFactory(); + SimpleSelect select = new SimpleSelect( factory.getDialect() ) + .setLockMode( lockMode ) + .setTableName( lockable.getRootTableName() ) + .addColumn( lockable.getRootTableIdentifierColumnNames()[0] ) + .addCondition( lockable.getRootTableIdentifierColumnNames(), "=?" ); + if ( lockable.isVersioned() ) { + select.addCondition( lockable.getVersionColumnName(), "=?" 
); + } + if ( factory.getSettings().isCommentsEnabled() ) { + select.setComment( lockMode + " lock " + lockable.getEntityName() ); + } + return select.toStatementString(); + } +} diff --git a/src/org/hibernate/dialect/lock/UpdateLockingStrategy.java b/src/org/hibernate/dialect/lock/UpdateLockingStrategy.java new file mode 100644 index 0000000000..ae5d488709 --- /dev/null +++ b/src/org/hibernate/dialect/lock/UpdateLockingStrategy.java @@ -0,0 +1,122 @@ +package org.hibernate.dialect.lock; + +import org.hibernate.persister.entity.Lockable; +import org.hibernate.LockMode; +import org.hibernate.HibernateException; +import org.hibernate.StaleObjectStateException; +import org.hibernate.JDBCException; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.sql.Update; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.SessionFactoryImplementor; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.SQLException; + +/** + * A locking strategy where the locks are obtained through update statements. + *
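+ * The generated statement is, roughly (a sketch of the SQL shape implied by
+ * generateLockString() below):
+ *     update table_name set version = ? where id = ? and version = ?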
    + * This strategy is not valid for read style locks. + * + * @since 3.2 + * + * @author Steve Ebersole + */ +public class UpdateLockingStrategy implements LockingStrategy { + private static final Log log = LogFactory.getLog( UpdateLockingStrategy.class ); + + private final Lockable lockable; + private final LockMode lockMode; + private final String sql; + + /** + * Construct a locking strategy based on SQL UPDATE statements. + * + * @param lockable The metadata for the entity to be locked. + * @param lockMode Indictates the type of lock to be acquired. Note that + * read-locks are not valid for this strategy. + */ + public UpdateLockingStrategy(Lockable lockable, LockMode lockMode) { + this.lockable = lockable; + this.lockMode = lockMode; + if ( lockMode.lessThan( LockMode.UPGRADE ) ) { + throw new HibernateException( "[" + lockMode + "] not valid for update statement" ); + } + if ( !lockable.isVersioned() ) { + log.warn( "write locks via update not supported for non-versioned entities [" + lockable.getEntityName() + "]" ); + this.sql = null; + } + else { + this.sql = generateLockString(); + } + } + + /** + * @see LockingStrategy#lock + */ + public void lock( + Serializable id, + Object version, + Object object, + SessionImplementor session) throws StaleObjectStateException, JDBCException { + if ( !lockable.isVersioned() ) { + throw new HibernateException( "write locks via update not supported for non-versioned entities [" + lockable.getEntityName() + "]" ); + } + // todo : should we additionally check the current isolation mode explicitly? + SessionFactoryImplementor factory = session.getFactory(); + try { + PreparedStatement st = session.getBatcher().prepareSelectStatement( sql ); + try { + lockable.getVersionType().nullSafeSet( st, version, 1, session ); + int offset = 2; + + lockable.getIdentifierType().nullSafeSet( st, id, offset, session ); + offset += lockable.getIdentifierType().getColumnSpan( factory ); + + if ( lockable.isVersioned() ) { + lockable.getVersionType().nullSafeSet( st, version, offset, session ); + } + + int affected = st.executeUpdate(); + if ( affected < 0 ) { + factory.getStatisticsImplementor().optimisticFailure( lockable.getEntityName() ); + throw new StaleObjectStateException( lockable.getEntityName(), id ); + } + + } + finally { + session.getBatcher().closeStatement( st ); + } + + } + catch ( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not lock: " + MessageHelper.infoString( lockable, id, session.getFactory() ), + sql + ); + } + } + + protected String generateLockString() { + SessionFactoryImplementor factory = lockable.getFactory(); + Update update = new Update( factory.getDialect() ); + update.setTableName( lockable.getRootTableName() ); + update.setPrimaryKeyColumnNames( lockable.getRootTableIdentifierColumnNames() ); + update.setVersionColumnName( lockable.getVersionColumnName() ); + update.addColumn( lockable.getVersionColumnName() ); + if ( factory.getSettings().isCommentsEnabled() ) { + update.setComment( lockMode + " lock " + lockable.getEntityName() ); + } + return update.toStatementString(); + } + + protected LockMode getLockMode() { + return lockMode; + } +} diff --git a/src/org/hibernate/dialect/package.html b/src/org/hibernate/dialect/package.html new file mode 100755 index 0000000000..734e14747f --- /dev/null +++ b/src/org/hibernate/dialect/package.html @@ -0,0 +1,11 @@ + + + +
+    This package abstracts the SQL dialect of the underlying database.
+
+    A concrete Dialect may be specified using hibernate.dialect.
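+
+    For example, in hibernate.properties (an illustrative snippet; any of the
+    shipped dialect classes may be named here):
+
+        hibernate.dialect org.hibernate.dialect.HSQLDialect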
    + + diff --git a/src/org/hibernate/engine/ActionQueue.java b/src/org/hibernate/engine/ActionQueue.java new file mode 100644 index 0000000000..e17aa38f99 --- /dev/null +++ b/src/org/hibernate/engine/ActionQueue.java @@ -0,0 +1,530 @@ +// $Id$ +package org.hibernate.engine; + +import org.hibernate.action.EntityInsertAction; +import org.hibernate.action.EntityDeleteAction; +import org.hibernate.action.Executable; +import org.hibernate.action.EntityUpdateAction; +import org.hibernate.action.CollectionRecreateAction; +import org.hibernate.action.CollectionRemoveAction; +import org.hibernate.action.CollectionUpdateAction; +import org.hibernate.action.EntityIdentityInsertAction; +import org.hibernate.action.BulkOperationCleanupAction; +import org.hibernate.HibernateException; +import org.hibernate.AssertionFailure; +import org.hibernate.cache.CacheException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import java.util.HashMap; +import java.util.Iterator; +import java.io.ObjectInputStream; +import java.io.IOException; +import java.io.Serializable; +import java.io.ObjectOutputStream; + +/** + * Responsible for maintaining the queue of actions related to events. + *
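+ * A flush drives this queue roughly as follows (an illustrative sketch using
+ * the methods declared below):
+ *     queue.prepareActions();  // prepare the collection actions
+ *     queue.executeActions();  // inserts, updates, collection ops, then deletions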
    + * The ActionQueue holds the DML operations queued as part of a session's + * transactional-write-behind semantics. DML operations are queued here + * until a flush forces them to be executed against the database. + * + * @author Steve Ebersole + */ +public class ActionQueue { + + private static final Log log = LogFactory.getLog( ActionQueue.class ); + private static final int INIT_QUEUE_LIST_SIZE = 5; + + private SessionImplementor session; + + // Object insertions, updates, and deletions have list semantics because + // they must happen in the right order so as to respect referential + // integrity + private ArrayList insertions; + private ArrayList deletions; + private ArrayList updates; + // Actually the semantics of the next three are really "Bag" + // Note that, unlike objects, collection insertions, updates, + // deletions are not really remembered between flushes. We + // just re-use the same Lists for convenience. + private ArrayList collectionCreations; + private ArrayList collectionUpdates; + private ArrayList collectionRemovals; + + private ArrayList executions; + + /** + * Constructs an action queue bound to the given session. + * + * @param session The session "owning" this queue. + */ + public ActionQueue(SessionImplementor session) { + this.session = session; + init(); + } + + private void init() { + insertions = new ArrayList( INIT_QUEUE_LIST_SIZE ); + deletions = new ArrayList( INIT_QUEUE_LIST_SIZE ); + updates = new ArrayList( INIT_QUEUE_LIST_SIZE ); + + collectionCreations = new ArrayList( INIT_QUEUE_LIST_SIZE ); + collectionRemovals = new ArrayList( INIT_QUEUE_LIST_SIZE ); + collectionUpdates = new ArrayList( INIT_QUEUE_LIST_SIZE ); + + executions = new ArrayList( INIT_QUEUE_LIST_SIZE * 3 ); + } + + public void clear() { + updates.clear(); + insertions.clear(); + deletions.clear(); + + collectionCreations.clear(); + collectionRemovals.clear(); + collectionUpdates.clear(); + } + + public void addAction(EntityInsertAction action) { + insertions.add( action ); + } + + public void addAction(EntityDeleteAction action) { + deletions.add( action ); + } + + public void addAction(EntityUpdateAction action) { + updates.add( action ); + } + + public void addAction(CollectionRecreateAction action) { + collectionCreations.add( action ); + } + + public void addAction(CollectionRemoveAction action) { + collectionRemovals.add( action ); + } + + public void addAction(CollectionUpdateAction action) { + collectionUpdates.add( action ); + } + + public void addAction(EntityIdentityInsertAction insert) { + insertions.add( insert ); + } + + public void addAction(BulkOperationCleanupAction cleanupAction) { + // Add these directly to the executions queue + executions.add( cleanupAction ); + } + + /** + * Perform all currently queued entity-insertion actions. + * + * @throws HibernateException error executing queued insertion actions. + */ + public void executeInserts() throws HibernateException { + executeActions( insertions ); + } + + /** + * Perform all currently queued actions. + * + * @throws HibernateException error executing queued actions. + */ + public void executeActions() throws HibernateException { + executeActions( insertions ); + executeActions( updates ); + executeActions( collectionRemovals ); + executeActions( collectionUpdates ); + executeActions( collectionCreations ); + executeActions( deletions ); + } + + /** + * Prepares the internal action queues for execution. + * + * @throws HibernateException error preparing actions. 
+ */ + public void prepareActions() throws HibernateException { + prepareActions( collectionRemovals ); + prepareActions( collectionUpdates ); + prepareActions( collectionCreations ); + } + + /** + * Performs cleanup of any held cache softlocks. + * + * @param success Was the transaction successful. + */ + public void afterTransactionCompletion(boolean success) { + int size = executions.size(); + final boolean invalidateQueryCache = session.getFactory().getSettings().isQueryCacheEnabled(); + for ( int i = 0; i < size; i++ ) { + try { + Executable exec = ( Executable ) executions.get(i); + try { + exec.afterTransactionCompletion( success ); + } + finally { + if ( invalidateQueryCache ) { + session.getFactory().getUpdateTimestampsCache().invalidate( exec.getPropertySpaces() ); + } + } + } + catch (CacheException ce) { + log.error( "could not release a cache lock", ce ); + // continue loop + } + catch (Exception e) { + throw new AssertionFailure( "Exception releasing cache locks", e ); + } + } + executions.clear(); + } + + /** + * Check whether the given tables/query-spaces are to be executed against + * given the currently queued actions. + * + * @param tables The table/query-spaces to check. + * @return True if we contain pending actions against any of the given + * tables; false otherwise. + */ + public boolean areTablesToBeUpdated(Set tables) { + return areTablesToUpdated( updates, tables ) || + areTablesToUpdated( insertions, tables ) || + areTablesToUpdated( deletions, tables ) || + areTablesToUpdated( collectionUpdates, tables ) || + areTablesToUpdated( collectionCreations, tables ) || + areTablesToUpdated( collectionRemovals, tables ); + } + + /** + * Check whether any insertion or deletion actions are currently queued. + * + * @return True if insertions or deletions are currently queued; false otherwise. + */ + public boolean areInsertionsOrDeletionsQueued() { + return ( insertions.size() > 0 || deletions.size() > 0 ); + } + + private static boolean areTablesToUpdated(List executables, Set tablespaces) { + int size = executables.size(); + for ( int j = 0; j < size; j++ ) { + Serializable[] spaces = ( (Executable) executables.get(j) ).getPropertySpaces(); + for ( int i = 0; i < spaces.length; i++ ) { + if ( tablespaces.contains( spaces[i] ) ) { + if ( log.isDebugEnabled() ) log.debug( "changes must be flushed to space: " + spaces[i] ); + return true; + } + } + } + return false; + } + + private void executeActions(List list) throws HibernateException { + int size = list.size(); + for ( int i = 0; i < size; i++ ) { + execute( (Executable) list.get(i) ); + } + list.clear(); + session.getBatcher().executeBatch(); + } + + public void execute(Executable executable) { + final boolean lockQueryCache = session.getFactory().getSettings().isQueryCacheEnabled(); + if ( executable.hasAfterTransactionCompletion() || lockQueryCache ) { + executions.add( executable ); + } + if (lockQueryCache) { + session.getFactory() + .getUpdateTimestampsCache() + .preinvalidate( executable.getPropertySpaces() ); + } + executable.execute(); + } + + private void prepareActions(List queue) throws HibernateException { + int size = queue.size(); + for ( int i=0; i lastPos ) { + // 'checkAction' is inserting an entity upon which 'action' + // depends... 
+ // note: this is an assumption and may not be correct in the case of one-to-one + ArrayList segmentedActionQueue = new ArrayList(); + segmentedActionQueue.add( action ); + nameList.add( thisEntityName ); + positionToAction.put(new Integer( nameList.lastIndexOf( thisEntityName ) ), segmentedActionQueue ); + continue loopInsertion; + } + } + } + } + + ArrayList actionQueue = ( ArrayList ) positionToAction.get( new Integer( lastPos ) ); + actionQueue.add( action ); + } + } + + // now iterate back through positionToAction map and move entityInsertAction back to insertion list + for ( int p = 0; p < nameList.size(); p++ ) { + ArrayList actionQueue = ( ArrayList ) positionToAction.get( new Integer( p ) ); + Iterator itr = actionQueue.iterator(); + while ( itr.hasNext() ) { + insertions.add( itr.next() ); + } + } + } + + public ArrayList cloneDeletions() { + return (ArrayList) deletions.clone(); + } + + public void clearFromFlushNeededCheck(int previousCollectionRemovalSize) { + collectionCreations.clear(); + collectionUpdates.clear(); + updates.clear(); + // collection deletions are a special case since update() can add + // deletions of collections not loaded by the session. + for ( int i = collectionRemovals.size()-1; i >= previousCollectionRemovalSize; i-- ) { + collectionRemovals.remove(i); + } + } + + public boolean hasAnyQueuedActions() { + return updates.size() > 0 || + insertions.size() > 0 || + deletions.size() > 0 || + collectionUpdates.size() > 0 || + collectionRemovals.size() > 0 || + collectionCreations.size() > 0; + } + + /** + * Used by the owning session to explicitly control serialization of the + * action queue + * + * @param oos The stream to which the action queue should get written + * @throws IOException + */ + public void serialize(ObjectOutputStream oos) throws IOException { + log.trace( "serializing action-queue" ); + + int queueSize = insertions.size(); + log.trace( "starting serialization of [" + queueSize + "] insertions entries" ); + oos.writeInt( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + oos.writeObject( insertions.get( i ) ); + } + + queueSize = deletions.size(); + log.trace( "starting serialization of [" + queueSize + "] deletions entries" ); + oos.writeInt( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + oos.writeObject( deletions.get( i ) ); + } + + queueSize = updates.size(); + log.trace( "starting serialization of [" + queueSize + "] updates entries" ); + oos.writeInt( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + oos.writeObject( updates.get( i ) ); + } + + queueSize = collectionUpdates.size(); + log.trace( "starting serialization of [" + queueSize + "] collectionUpdates entries" ); + oos.writeInt( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + oos.writeObject( collectionUpdates.get( i ) ); + } + + queueSize = collectionRemovals.size(); + log.trace( "starting serialization of [" + queueSize + "] collectionRemovals entries" ); + oos.writeInt( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + oos.writeObject( collectionRemovals.get( i ) ); + } + + queueSize = collectionCreations.size(); + log.trace( "starting serialization of [" + queueSize + "] collectionCreations entries" ); + oos.writeInt( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + oos.writeObject( collectionCreations.get( i ) ); + } + } + + /** + * Used by the owning session to explicitly control deserialization of the + * action queue + * + * @param ois The stream from which to read the action queue + * @throws IOException + */ + public 
static ActionQueue deserialize( + ObjectInputStream ois, + SessionImplementor session) throws IOException, ClassNotFoundException { + log.trace( "deserializing action-queue" ); + ActionQueue rtn = new ActionQueue( session ); + + int queueSize = ois.readInt(); + log.trace( "starting deserialization of [" + queueSize + "] insertions entries" ); + rtn.insertions = new ArrayList( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + rtn.insertions.add( ois.readObject() ); + } + + queueSize = ois.readInt(); + log.trace( "starting deserialization of [" + queueSize + "] deletions entries" ); + rtn.deletions = new ArrayList( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + rtn.deletions.add( ois.readObject() ); + } + + queueSize = ois.readInt(); + log.trace( "starting deserialization of [" + queueSize + "] updates entries" ); + rtn.updates = new ArrayList( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + rtn.updates.add( ois.readObject() ); + } + + queueSize = ois.readInt(); + log.trace( "starting deserialization of [" + queueSize + "] collectionUpdates entries" ); + rtn.collectionUpdates = new ArrayList( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + rtn.collectionUpdates.add( ois.readObject() ); + } + + queueSize = ois.readInt(); + log.trace( "starting deserialization of [" + queueSize + "] collectionRemovals entries" ); + rtn.collectionRemovals = new ArrayList( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + rtn.collectionRemovals.add( ois.readObject() ); + } + + queueSize = ois.readInt(); + log.trace( "starting deserialization of [" + queueSize + "] collectionCreations entries" ); + rtn.collectionCreations = new ArrayList( queueSize ); + for ( int i = 0; i < queueSize; i++ ) { + rtn.collectionCreations.add( ois.readObject() ); + } + return rtn; + } + +} diff --git a/src/org/hibernate/engine/AssociationKey.java b/src/org/hibernate/engine/AssociationKey.java new file mode 100755 index 0000000000..667639736a --- /dev/null +++ b/src/org/hibernate/engine/AssociationKey.java @@ -0,0 +1,31 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; + +/** + * Identifies a named association belonging to a particular + * entity instance. Used to record the fact that an association + * is null during loading. 
+ * + * @author Gavin King + */ +final class AssociationKey implements Serializable { + private EntityKey ownerKey; + private String propertyName; + + public AssociationKey(EntityKey ownerKey, String propertyName) { + this.ownerKey = ownerKey; + this.propertyName = propertyName; + } + + public boolean equals(Object that) { + AssociationKey key = (AssociationKey) that; + return key.propertyName.equals(propertyName) && + key.ownerKey.equals(ownerKey); + } + + public int hashCode() { + return ownerKey.hashCode() + propertyName.hashCode(); + } +} \ No newline at end of file diff --git a/src/org/hibernate/engine/BatchFetchQueue.java b/src/org/hibernate/engine/BatchFetchQueue.java new file mode 100755 index 0000000000..b32e7d7d65 --- /dev/null +++ b/src/org/hibernate/engine/BatchFetchQueue.java @@ -0,0 +1,276 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import org.apache.commons.collections.SequencedHashMap; +import org.hibernate.EntityMode; +import org.hibernate.cache.CacheKey; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.util.MarkerObject; + +/** + * Tracks entity and collection keys that are available for batch + * fetching, and the queries which were used to load entities, which + * can be re-used as a subquery for loading owned collections. + * + * @author Gavin King + */ +public class BatchFetchQueue { + + public static final Object MARKER = new MarkerObject( "MARKER" ); + + /** + * Defines a sequence of {@link EntityKey} elements that are currently + * elegible for batch-fetching. + *
    + * Even though this is a map, we only use the keys. A map was chosen in + * order to utilize a {@link SequencedHashMap} to maintain sequencing + * as well as uniqueness. + *
    + * TODO : this would be better as a SequencedReferenceSet, but no such beast exists! + */ + private final Map batchLoadableEntityKeys = new SequencedHashMap(8); + + /** + * A map of {@link SubselectFetch subselect-fetch descriptors} keyed by the + * {@link EntityKey) against which the descriptor is registered. + */ + private final Map subselectsByEntityKey = new HashMap(8); + + /** + * The owning persistence context. + */ + private final PersistenceContext context; + + /** + * Constructs a queue for the given context. + * + * @param context The owning context. + */ + public BatchFetchQueue(PersistenceContext context) { + this.context = context; + } + + /** + * Clears all entries from this fetch queue. + */ + public void clear() { + batchLoadableEntityKeys.clear(); + subselectsByEntityKey.clear(); + } + + /** + * Retrieve the fetch descriptor associated with the given entity key. + * + * @param key The entity key for which to locate any defined subselect fetch. + * @return The fetch descriptor; may return null if no subselect fetch queued for + * this entity key. + */ + public SubselectFetch getSubselect(EntityKey key) { + return (SubselectFetch) subselectsByEntityKey.get(key); + } + + /** + * Adds a subselect fetch decriptor for the given entity key. + * + * @param key The entity for which to register the subselect fetch. + * @param subquery The fetch descriptor. + */ + public void addSubselect(EntityKey key, SubselectFetch subquery) { + subselectsByEntityKey.put(key, subquery); + } + + /** + * After evicting or deleting an entity, we don't need to + * know the query that was used to load it anymore (don't + * call this after loading the entity, since we might still + * need to load its collections) + */ + public void removeSubselect(EntityKey key) { + subselectsByEntityKey.remove(key); + } + + /** + * Clears all pending subselect fetches from the queue. + *
    + * Called after flushing. + */ + public void clearSubselects() { + subselectsByEntityKey.clear(); + } + + /** + * If an EntityKey represents a batch loadable entity, add + * it to the queue. + *
    + * Note that the contract here is such that any key passed in should + * previously have been been checked for existence within the + * {@link PersistenceContext}; failure to do so may cause the + * referenced entity to be included in a batch even though it is + * already associated with the {@link PersistenceContext}. + */ + public void addBatchLoadableEntityKey(EntityKey key) { + if ( key.isBatchLoadable() ) { + batchLoadableEntityKeys.put( key, MARKER ); + } + } + + /** + * After evicting or deleting or loading an entity, we don't + * need to batch fetch it anymore, remove it from the queue + * if necessary + */ + public void removeBatchLoadableEntityKey(EntityKey key) { + if ( key.isBatchLoadable() ) batchLoadableEntityKeys.remove(key); + } + + /** + * Get a batch of uninitialized collection keys for a given role + * + * @param collectionPersister The persister for the collection role. + * @param id A key that must be included in the batch fetch + * @param batchSize the maximum number of keys to return + * @return an array of collection keys, of length batchSize (padded with nulls) + */ + public Serializable[] getCollectionBatch( + final CollectionPersister collectionPersister, + final Serializable id, + final int batchSize, + final EntityMode entityMode) { + Serializable[] keys = new Serializable[batchSize]; + keys[0] = id; + int i = 1; + //int count = 0; + int end = -1; + boolean checkForEnd = false; + // this only works because collection entries are kept in a sequenced + // map by persistence context (maybe we should do like entities and + // keep a separate sequences set...) + Iterator iter = context.getCollectionEntries().entrySet().iterator(); //TODO: calling entrySet on an IdentityMap is SLOW!! + while ( iter.hasNext() ) { + Map.Entry me = (Map.Entry) iter.next(); + + CollectionEntry ce = (CollectionEntry) me.getValue(); + PersistentCollection collection = (PersistentCollection) me.getKey(); + if ( !collection.wasInitialized() && ce.getLoadedPersister() == collectionPersister ) { + + if ( checkForEnd && i == end ) { + return keys; //the first key found after the given key + } + + //if ( end == -1 && count > batchSize*10 ) return keys; //try out ten batches, max + + final boolean isEqual = collectionPersister.getKeyType().isEqual( + id, + ce.getLoadedKey(), + entityMode, + collectionPersister.getFactory() + ); + + if ( isEqual ) { + end = i; + //checkForEnd = false; + } + else if ( !isCached( ce.getLoadedKey(), collectionPersister, entityMode ) ) { + keys[i++] = ce.getLoadedKey(); + //count++; + } + + if ( i == batchSize ) { + i = 1; //end of array, start filling again from start + if ( end != -1 ) { + checkForEnd = true; + } + } + } + + } + return keys; //we ran out of keys to try + } + + /** + * Get a batch of unloaded identifiers for this class, using a slightly + * complex algorithm that tries to grab keys registered immediately after + * the given key. + * + * @param persister The persister for the entities being loaded. + * @param id The identifier of the entity currently demanding load. + * @param batchSize The maximum number of keys to return + * @return an array of identifiers, of length batchSize (possibly padded with nulls) + */ + public Serializable[] getEntityBatch( + final EntityPersister persister, + final Serializable id, + final int batchSize, + final EntityMode entityMode) { + Serializable[] ids = new Serializable[batchSize]; + ids[0] = id; //first element of array is reserved for the actual instance we are loading! 
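+		// a sketch of the scan below: slots ids[1..batchSize) are filled, in
+		// registration order, with batch-loadable ids of the same entity that are
+		// not already cached; on reaching the end of the array we wrap back to
+		// index 1, and if the requested id's own position ('end') has already been
+		// seen, 'checkForEnd' stops the scan when it cycles back around to it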
+ int i = 1; + int end = -1; + boolean checkForEnd = false; + + Iterator iter = batchLoadableEntityKeys.keySet().iterator(); + while ( iter.hasNext() ) { + EntityKey key = (EntityKey) iter.next(); + if ( key.getEntityName().equals( persister.getEntityName() ) ) { //TODO: this needn't exclude subclasses... + if ( checkForEnd && i == end ) { + //the first id found after the given id + return ids; + } + if ( persister.getIdentifierType().isEqual( id, key.getIdentifier(), entityMode ) ) { + end = i; + } + else { + if ( !isCached( key, persister, entityMode ) ) { + ids[i++] = key.getIdentifier(); + } + } + if ( i == batchSize ) { + i = 1; //end of array, start filling again from start + if (end!=-1) checkForEnd = true; + } + } + } + return ids; //we ran out of ids to try + } + + private boolean isCached( + EntityKey entityKey, + EntityPersister persister, + EntityMode entityMode) { + if ( persister.hasCache() ) { + CacheKey key = new CacheKey( + entityKey.getIdentifier(), + persister.getIdentifierType(), + entityKey.getEntityName(), + entityMode, + context.getSession().getFactory() + ); + return persister.getCache().getCache().get( key ) != null; + } + return false; + } + + private boolean isCached( + Serializable collectionKey, + CollectionPersister persister, + EntityMode entityMode) { + if ( persister.hasCache() ) { + CacheKey cacheKey = new CacheKey( + collectionKey, + persister.getKeyType(), + persister.getRole(), + entityMode, + context.getSession().getFactory() + ); + return persister.getCache().getCache().get( cacheKey ) != null; + } + return false; + } +} diff --git a/src/org/hibernate/engine/Cascade.java b/src/org/hibernate/engine/Cascade.java new file mode 100644 index 0000000000..d89e7b59b7 --- /dev/null +++ b/src/org/hibernate/engine/Cascade.java @@ -0,0 +1,360 @@ +//$Id$ +package org.hibernate.engine; + +import java.util.Collection; +import java.util.Iterator; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.event.EventSource; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.AbstractComponentType; +import org.hibernate.type.AssociationType; +import org.hibernate.type.CollectionType; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; +import org.hibernate.util.CollectionHelper; + +/** + * Delegate responsible for, in conjunction with the various + * {@link CascadingAction actions}, implementing cascade processing. 
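+ * An illustrative sketch of typical usage, based on the constructor and
+ * constants declared below:
+ *     new Cascade( CascadingAction.REPLICATE, Cascade.BEFORE_INSERT_AFTER_DELETE, eventSource )
+ *             .cascade( persister, parent, replicationMode );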
+ * + * @author Gavin King + * @see CascadingAction + */ +public final class Cascade { + + /** + * A cascade point that occurs just after the insertion of the parent entity and + * just before deletion + */ + public static final int AFTER_INSERT_BEFORE_DELETE = 1; + /** + * A cascade point that occurs just before the insertion of the parent entity and + * just after deletion + */ + public static final int BEFORE_INSERT_AFTER_DELETE = 2; + /** + * A cascade point that occurs just after the insertion of the parent entity and + * just before deletion, inside a collection + */ + public static final int AFTER_INSERT_BEFORE_DELETE_VIA_COLLECTION = 3; + /** + * A cascade point that occurs just after update of the parent entity + */ + public static final int AFTER_UPDATE = 0; + /** + * A cascade point that occurs just before the session is flushed + */ + public static final int BEFORE_FLUSH = 0; + /** + * A cascade point that occurs just after eviction of the parent entity from the + * session cache + */ + public static final int AFTER_EVICT = 0; + /** + * A cascade point that occurs just after locking a transient parent entity into the + * session cache + */ + public static final int BEFORE_REFRESH = 0; + /** + * A cascade point that occurs just after refreshing a parent entity + */ + public static final int AFTER_LOCK = 0; + /** + * A cascade point that occurs just before merging from a transient parent entity into + * the object in the session cache + */ + public static final int BEFORE_MERGE = 0; + + + private static final Log log = LogFactory.getLog( Cascade.class ); + + + private int cascadeTo; + private EventSource eventSource; + private CascadingAction action; + + public Cascade(final CascadingAction action, final int cascadeTo, final EventSource eventSource) { + this.cascadeTo = cascadeTo; + this.eventSource = eventSource; + this.action = action; + } + + /** + * Cascade an action from the parent entity instance to all its children. + * + * @param persister The parent's entity persister + * @param parent The parent reference. + * @throws HibernateException + */ + public void cascade(final EntityPersister persister, final Object parent) + throws HibernateException { + cascade( persister, parent, null ); + } + + /** + * Cascade an action from the parent entity instance to all its children. This + * form is typicaly called from within cascade actions. + * + * @param persister The parent's entity persister + * @param parent The parent reference. + * @param anything Anything ;) Typically some form of cascade-local cache + * which is specific to each CascadingAction type + * @throws HibernateException + */ + public void cascade(final EntityPersister persister, final Object parent, final Object anything) + throws HibernateException { + + if ( persister.hasCascades() || action.requiresNoCascadeChecking() ) { // performance opt + if ( log.isTraceEnabled() ) { + log.trace( "processing cascade " + action + " for: " + persister.getEntityName() ); + } + + Type[] types = persister.getPropertyTypes(); + CascadeStyle[] cascadeStyles = persister.getPropertyCascadeStyles(); + EntityMode entityMode = eventSource.getEntityMode(); + boolean hasUninitializedLazyProperties = persister.hasUninitializedLazyProperties( parent, entityMode ); + for ( int i=0; i + * For this style, should the given action really be cascaded? The default + * implementation is simply to return {@link #doCascade}; for certain + * styles (currently only delete-orphan), however, we need to be able to + * control this seperately. 
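
For orientation, one plausible invocation of the Cascade delegate defined above, as a hedged sketch; the action constant shown (REPLICATE appears later in this patch), the cascade point, and the persister/parent/eventSource context are assumed rather than taken from a confirmed call site:

    // Sketch -- not a confirmed call site:
    //
    //   new Cascade( CascadingAction.REPLICATE, Cascade.BEFORE_INSERT_AFTER_DELETE, eventSource )
    //           .cascade( persister, parent );
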
+ * + * @param action The action to be checked for cascade-ability. + * @return True if the action should be really cascaded under this style; + * false otherwise. + */ + public boolean reallyDoCascade(CascadingAction action) { + return doCascade(action); + } + + /** + * Do we need to delete orphaned collection elements? + * + * @return True if this style need to account for orphan delete + * operations; false othwerwise. + */ + public boolean hasOrphanDelete() { + return false; + } + + public static final class MultipleCascadeStyle extends CascadeStyle { + private final CascadeStyle[] styles; + public MultipleCascadeStyle(CascadeStyle[] styles) { + this.styles = styles; + } + public boolean doCascade(CascadingAction action) { + for (int i=0; i " + childEntityName + ); + + } + } + } + public boolean performOnLazyProperty() { + return false; + } + + private boolean isInManagedState(Object child, EventSource session) { + EntityEntry entry = session.getPersistenceContext().getEntry( child ); + return entry != null && (entry.getStatus() == Status.MANAGED || entry.getStatus() == Status.READ_ONLY); + } + + public String toString() { + return "ACTION_PERSIST_ON_FLUSH"; + } + }; + + /** + * @see org.hibernate.Session#replicate(Object, org.hibernate.ReplicationMode) + */ + public static final CascadingAction REPLICATE = new CascadingAction() { + public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled) + throws HibernateException { + if ( log.isTraceEnabled() ) { + log.trace( "cascading to replicate: " + entityName ); + } + session.replicate( entityName, child, (ReplicationMode) anything ); + } + public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) { + // replicate does cascade to uninitialized collections + return getLoadedElementsIterator(session, collectionType, collection); + } + public boolean deleteOrphans() { + return false; //I suppose? + } + public String toString() { + return "ACTION_REPLICATE"; + } + }; + + + // static helper methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Given a collection, get an iterator of all its children, loading them + * from the database if necessary. + * + * @param session The session within which the cascade is occuring. + * @param collectionType The mapping type of the collection. + * @param collection The collection instance. + * @return The children iterator. + */ + private static Iterator getAllElementsIterator( + EventSource session, + CollectionType collectionType, + Object collection) { + return collectionType.getElementsIterator( collection, session ); + } + + /** + * Iterate just the elements of the collection that are already there. Don't load + * any new elements from the database. 
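
Two iteration strategies appear here: getAllElementsIterator() may initialize a lazy collection, while getLoadedElementsIterator(), whose signature follows, restricts itself to what is already in memory plus queued additions. A hedged contrast (the enclosing class name and surrounding objects are assumptions):

    //   Iterator all = collectionType.getElementsIterator( collection, session );
    //   // may trigger a database load of the collection
    //
    //   Iterator loaded = getLoadedElementsIterator( session, collectionType, collection );
    //   // uninitialized wrapper -> only queued additions are visited; nothing is fetched
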
+ */ + public static Iterator getLoadedElementsIterator(SessionImplementor session, CollectionType collectionType, Object collection) { + if ( collectionIsInitialized(collection) ) { + // handles arrays and newly instantiated collections + return collectionType.getElementsIterator(collection, session); + } + else { + // does not handle arrays (thats ok, cos they can't be lazy) + // or newly instantiated collections, so we can do the cast + return ( (PersistentCollection) collection ).queuedAdditionIterator(); + } + } + + private static boolean collectionIsInitialized(Object collection) { + return !(collection instanceof PersistentCollection) || ( (PersistentCollection) collection ).wasInitialized(); + } + +} \ No newline at end of file diff --git a/src/org/hibernate/engine/CollectionEntry.java b/src/org/hibernate/engine/CollectionEntry.java new file mode 100644 index 0000000000..d196db6086 --- /dev/null +++ b/src/org/hibernate/engine/CollectionEntry.java @@ -0,0 +1,397 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.util.Collection; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; + +/** + * We need an entry to tell us all about the current state + * of a collection with respect to its persistent state + * + * @author Gavin King + */ +public final class CollectionEntry implements Serializable { + + private static final Log log = LogFactory.getLog(CollectionEntry.class); + + //ATTRIBUTES MAINTAINED BETWEEN FLUSH CYCLES + + // session-start/post-flush persistent state + private Serializable snapshot; + // allow the CollectionSnapshot to be serialized + private String role; + + // "loaded" means the reference that is consistent + // with the current database state + private transient CollectionPersister loadedPersister; + private Serializable loadedKey; + + // ATTRIBUTES USED ONLY DURING FLUSH CYCLE + + // during flush, we navigate the object graph to + // collections and decide what to do with them + private transient boolean reached; + private transient boolean processed; + private transient boolean doupdate; + private transient boolean doremove; + private transient boolean dorecreate; + // if we instantiate a collection during the flush() process, + // we must ignore it for the rest of the flush() + private transient boolean ignore; + + // "current" means the reference that was found during flush() + private transient CollectionPersister currentPersister; + private transient Serializable currentKey; + + /** + * For newly wrapped collections, or dereferenced collection wrappers + */ + public CollectionEntry(CollectionPersister persister, PersistentCollection collection) { + // new collections that get found + wrapped + // during flush shouldn't be ignored + ignore = false; + + collection.clearDirty(); //a newly wrapped collection is NOT dirty (or we get unnecessary version updates) + + snapshot = persister.isMutable() ? 
+ collection.getSnapshot(persister) : + null; + collection.setSnapshot(loadedKey, role, snapshot); + } + + /** + * For collections just loaded from the database + */ + public CollectionEntry( + final PersistentCollection collection, + final CollectionPersister loadedPersister, + final Serializable loadedKey, + final boolean ignore + ) { + this.ignore=ignore; + + //collection.clearDirty() + + this.loadedKey = loadedKey; + setLoadedPersister(loadedPersister); + + collection.setSnapshot(loadedKey, role, null); + + //postInitialize() will be called after initialization + } + + /** + * For uninitialized detached collections + */ + public CollectionEntry(CollectionPersister loadedPersister, Serializable loadedKey) { + // detached collection wrappers that get found + reattached + // during flush shouldn't be ignored + ignore = false; + + //collection.clearDirty() + + this.loadedKey = loadedKey; + setLoadedPersister(loadedPersister); + } + + /** + * For initialized detached collections + */ + CollectionEntry(PersistentCollection collection, SessionFactoryImplementor factory) + throws MappingException { + // detached collections that get found + reattached + // during flush shouldn't be ignored + ignore = false; + + loadedKey = collection.getKey(); + setLoadedPersister( factory.getCollectionPersister( collection.getRole() ) ); + + snapshot = collection.getStoredSnapshot(); + } + + /** + * Used from custom serialization. + * + * @see #serialize + * @see #deserialize + */ + private CollectionEntry( + String role, + Serializable snapshot, + Serializable loadedKey, + SessionFactoryImplementor factory) { + this.role = role; + this.snapshot = snapshot; + this.loadedKey = loadedKey; + if ( role != null ) { + afterDeserialize( factory ); + } + } + + /** + * Determine if the collection is "really" dirty, by checking dirtiness + * of the collection elements, if necessary + */ + private void dirty(PersistentCollection collection) throws HibernateException { + + boolean forceDirty = collection.wasInitialized() && + !collection.isDirty() && //optimization + getLoadedPersister() != null && + getLoadedPersister().isMutable() && //optimization + ( collection.isDirectlyAccessible() || getLoadedPersister().getElementType().isMutable() ) && //optimization + !collection.equalsSnapshot( getLoadedPersister() ); + + if ( forceDirty ) { + collection.dirty(); + } + + } + + public void preFlush(PersistentCollection collection) throws HibernateException { + + boolean nonMutableChange = collection.isDirty() && + getLoadedPersister()!=null && + !getLoadedPersister().isMutable(); + if (nonMutableChange) { + throw new HibernateException( + "changed an immutable collection instance: " + + MessageHelper.collectionInfoString( getLoadedPersister().getRole(), getLoadedKey() ) + ); + } + + dirty(collection); + + if ( log.isDebugEnabled() && collection.isDirty() && getLoadedPersister() != null ) { + log.debug( + "Collection dirty: " + + MessageHelper.collectionInfoString( getLoadedPersister().getRole(), getLoadedKey() ) + ); + } + + setDoupdate(false); + setDoremove(false); + setDorecreate(false); + setReached(false); + setProcessed(false); + } + + public void postInitialize(PersistentCollection collection) throws HibernateException { + snapshot = getLoadedPersister().isMutable() ? 
+ collection.getSnapshot( getLoadedPersister() ) : + null; + collection.setSnapshot(loadedKey, role, snapshot); + } + + /** + * Called after a successful flush + */ + public void postFlush(PersistentCollection collection) throws HibernateException { + if ( isIgnore() ) { + ignore = false; + } + else if ( !isProcessed() ) { + throw new AssertionFailure( "collection [" + collection.getRole() + "] was not processed by flush()" ); + } + collection.setSnapshot(loadedKey, role, snapshot); + } + + /** + * Called after execution of an action + */ + public void afterAction(PersistentCollection collection) { + loadedKey = getCurrentKey(); + setLoadedPersister( getCurrentPersister() ); + + boolean resnapshot = collection.wasInitialized() && + ( isDoremove() || isDorecreate() || isDoupdate() ); + if ( resnapshot ) { + snapshot = loadedPersister==null || !loadedPersister.isMutable() ? + null : + collection.getSnapshot(loadedPersister); //re-snapshot + } + + collection.postAction(); + } + + public Serializable getKey() { + return getLoadedKey(); + } + + public String getRole() { + return role; + } + + public Serializable getSnapshot() { + return snapshot; + } + + private void setLoadedPersister(CollectionPersister persister) { + loadedPersister = persister; + setRole( persister == null ? null : persister.getRole() ); + } + + void afterDeserialize(SessionFactoryImplementor factory) { + loadedPersister = factory.getCollectionPersister(role); + } + + public boolean wasDereferenced() { + return getLoadedKey() == null; + } + + public boolean isReached() { + return reached; + } + + public void setReached(boolean reached) { + this.reached = reached; + } + + public boolean isProcessed() { + return processed; + } + + public void setProcessed(boolean processed) { + this.processed = processed; + } + + public boolean isDoupdate() { + return doupdate; + } + + public void setDoupdate(boolean doupdate) { + this.doupdate = doupdate; + } + + public boolean isDoremove() { + return doremove; + } + + public void setDoremove(boolean doremove) { + this.doremove = doremove; + } + + public boolean isDorecreate() { + return dorecreate; + } + + public void setDorecreate(boolean dorecreate) { + this.dorecreate = dorecreate; + } + + public boolean isIgnore() { + return ignore; + } + + public CollectionPersister getCurrentPersister() { + return currentPersister; + } + + public void setCurrentPersister(CollectionPersister currentPersister) { + this.currentPersister = currentPersister; + } + + /** + * This is only available late during the flush + * cycle + */ + public Serializable getCurrentKey() { + return currentKey; + } + + public void setCurrentKey(Serializable currentKey) { + this.currentKey = currentKey; + } + + /** + * This is only available late during the flush cycle + */ + public CollectionPersister getLoadedPersister() { + return loadedPersister; + } + + public Serializable getLoadedKey() { + return loadedKey; + } + + public void setRole(String role) { + this.role = role; + } + + public String toString() { + String result = "CollectionEntry" + + MessageHelper.collectionInfoString( loadedPersister.getRole(), loadedKey ); + if (currentPersister!=null) { + result += "->" + + MessageHelper.collectionInfoString( currentPersister.getRole(), currentKey ); + } + return result; + } + + /** + * Get the collection orphans (entities which were removed from the collection) + */ + public Collection getOrphans(String entityName, PersistentCollection collection) + throws HibernateException { + if (snapshot==null) { + throw new 
AssertionFailure("no collection snapshot for orphan delete"); + } + return collection.getOrphans( snapshot, entityName ); + } + + public boolean isSnapshotEmpty(PersistentCollection collection) { + //TODO: does this really need to be here? + // does the collection already have + // it's own up-to-date snapshot? + return collection.wasInitialized() && + ( getLoadedPersister()==null || getLoadedPersister().isMutable() ) && + collection.isSnapshotEmpty( getSnapshot() ); + } + + + + /** + * Custom serialization routine used during serialization of a + * Session/PersistenceContext for increased performance. + * + * @param oos The stream to which we should write the serial data. + * @throws java.io.IOException + */ + void serialize(ObjectOutputStream oos) throws IOException { + oos.writeObject( role ); + oos.writeObject( snapshot ); + oos.writeObject( loadedKey ); + } + + /** + * Custom deserialization routine used during deserialization of a + * Session/PersistenceContext for increased performance. + * + * @param ois The stream from which to read the entry. + * @param session The session being deserialized. + * @return The deserialized CollectionEntry + * @throws IOException + * @throws ClassNotFoundException + */ + static CollectionEntry deserialize( + ObjectInputStream ois, + SessionImplementor session) throws IOException, ClassNotFoundException { + return new CollectionEntry( + ( String ) ois.readObject(), + ( Serializable ) ois.readObject(), + ( Serializable ) ois.readObject(), + session.getFactory() + ); + } +} \ No newline at end of file diff --git a/src/org/hibernate/engine/CollectionKey.java b/src/org/hibernate/engine/CollectionKey.java new file mode 100644 index 0000000000..e183514718 --- /dev/null +++ b/src/org/hibernate/engine/CollectionKey.java @@ -0,0 +1,113 @@ +//$Id$ +package org.hibernate.engine; + +import org.hibernate.EntityMode; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.Type; + + + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; + +/** + * Uniquely identifies a collection instance in a particular session. 
+ * + * @author Gavin King + */ +public final class CollectionKey implements Serializable { + + private final String role; + private final Serializable key; + private final Type keyType; + private final SessionFactoryImplementor factory; + private final int hashCode; + private EntityMode entityMode; + + public CollectionKey(CollectionPersister persister, Serializable key, EntityMode em) { + this( persister.getRole(), key, persister.getKeyType(), em, persister.getFactory() ); + } + + private CollectionKey( + String role, + Serializable key, + Type keyType, + EntityMode entityMode, + SessionFactoryImplementor factory) { + this.role = role; + this.key = key; + this.keyType = keyType; + this.entityMode = entityMode; + this.factory = factory; + this.hashCode = generateHashCode(); //cache the hashcode + } + + public boolean equals(Object other) { + CollectionKey that = (CollectionKey) other; + return that.role.equals(role) && + keyType.isEqual(that.key, key, entityMode, factory); + } + + public int generateHashCode() { + int result = 17; + result = 37 * result + role.hashCode(); + result = 37 * result + keyType.getHashCode(key, entityMode, factory); + return result; + } + + public int hashCode() { + return hashCode; + } + + public String getRole() { + return role; + } + + public Serializable getKey() { + return key; + } + + public String toString() { + return "CollectionKey" + + MessageHelper.collectionInfoString( factory.getCollectionPersister(role), key, factory ); + } + + /** + * Custom serialization routine used during serialization of a + * Session/PersistenceContext for increased performance. + * + * @param oos The stream to which we should write the serial data. + * @throws java.io.IOException + */ + void serialize(ObjectOutputStream oos) throws IOException { + oos.writeObject( role ); + oos.writeObject( key ); + oos.writeObject( keyType ); + oos.writeObject( entityMode.toString() ); + } + + /** + * Custom deserialization routine used during deserialization of a + * Session/PersistenceContext for increased performance. + * + * @param ois The stream from which to read the entry. + * @param session The session being deserialized. 
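
CollectionKey, like EntityKey later in this patch, is an immutable map key that precomputes its hash code once in the constructor. The idiom in isolation, with the same 17/37 accumulation and the same direct cast in equals() (no null or type guard, mirroring the patch):

    final class KeySketch {
        private final String role;
        private final java.io.Serializable id;
        private final int hashCode;

        KeySketch(String role, java.io.Serializable id) {
            this.role = role;
            this.id = id;
            this.hashCode = 37 * (37 * 17 + role.hashCode()) + id.hashCode();
        }

        public int hashCode() {
            return hashCode;    // computed once; keys are heavily used in maps
        }

        public boolean equals(Object other) {
            KeySketch that = (KeySketch) other;   // mirrors the patch: no guard
            return that.role.equals(role) && that.id.equals(id);
        }
    }
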
+ * @return The deserialized CollectionKey + * @throws IOException + * @throws ClassNotFoundException + */ + static CollectionKey deserialize( + ObjectInputStream ois, + SessionImplementor session) throws IOException, ClassNotFoundException { + return new CollectionKey( + ( String ) ois.readObject(), + ( Serializable ) ois.readObject(), + ( Type ) ois.readObject(), + EntityMode.parse( ( String ) ois.readObject() ), + session.getFactory() + ); + } +} \ No newline at end of file diff --git a/src/org/hibernate/engine/Collections.java b/src/org/hibernate/engine/Collections.java new file mode 100755 index 0000000000..63ade70b2e --- /dev/null +++ b/src/org/hibernate/engine/Collections.java @@ -0,0 +1,254 @@ +//$Id$ +package org.hibernate.engine; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.CollectionType; + +import java.io.Serializable; + +/** + * Implements book-keeping for the collection persistence by reachability algorithm + * @author Gavin King + */ +public final class Collections { + + private Collections() {} + + private static final Log log = LogFactory.getLog(Collections.class); + + /** + * record the fact that this collection was dereferenced + * + * @param coll The collection to be updated by unreachability. + * @throws HibernateException + */ + public static void processUnreachableCollection(PersistentCollection coll, SessionImplementor session) + throws HibernateException { + + if ( coll.getOwner()==null ) { + processNeverReferencedCollection(coll, session); + } + else { + processDereferencedCollection(coll, session); + } + + } + + private static void processDereferencedCollection(PersistentCollection coll, SessionImplementor session) + throws HibernateException { + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + CollectionEntry entry = persistenceContext.getCollectionEntry(coll); + final CollectionPersister loadedPersister = entry.getLoadedPersister(); + + if ( log.isDebugEnabled() && loadedPersister != null ) + log.debug( + "Collection dereferenced: " + + MessageHelper.collectionInfoString( + loadedPersister, + entry.getLoadedKey(), + session.getFactory() + ) + ); + + // do a check + boolean hasOrphanDelete = loadedPersister != null && + loadedPersister.hasOrphanDelete(); + if (hasOrphanDelete) { + Serializable ownerId = loadedPersister.getOwnerEntityPersister() + .getIdentifier( coll.getOwner(), session.getEntityMode() ); + if ( ownerId == null ) { + // the owning entity may have been deleted and its identifier unset due to + // identifier-rollback; in which case, try to look up its identifier from + // the persistence context + if ( session.getFactory().getSettings().isIdentifierRollbackEnabled() ) { + EntityEntry ownerEntry = persistenceContext.getEntry( coll.getOwner() ); + if ( ownerEntry != null ) { + ownerId = ownerEntry.getId(); + } + } + if ( ownerId == null ) { + throw new AssertionFailure( "Unable to determine collection owner identifier for orphan-delete processing" ); + } + } + EntityKey key = new EntityKey( + ownerId, + loadedPersister.getOwnerEntityPersister(), + session.getEntityMode() + ); + Object owner = persistenceContext.getEntity(key); + if ( owner == null ) { + throw new 
AssertionFailure( + "collection owner not associated with session: " + + loadedPersister.getRole() + ); + } + EntityEntry e = persistenceContext.getEntry(owner); + //only collections belonging to deleted entities are allowed to be dereferenced in the case of orphan delete + if ( e != null && e.getStatus() != Status.DELETED && e.getStatus() != Status.GONE ) { + throw new HibernateException( + "A collection with cascade=\"all-delete-orphan\" was no longer referenced by the owning entity instance: " + + loadedPersister.getRole() + ); + } + } + + // do the work + entry.setCurrentPersister(null); + entry.setCurrentKey(null); + prepareCollectionForUpdate( coll, entry, session.getEntityMode(), session.getFactory() ); + + } + + private static void processNeverReferencedCollection(PersistentCollection coll, SessionImplementor session) + throws HibernateException { + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + CollectionEntry entry = persistenceContext.getCollectionEntry(coll); + + log.debug( + "Found collection with unloaded owner: " + + MessageHelper.collectionInfoString( + entry.getLoadedPersister(), + entry.getLoadedKey(), + session.getFactory() + ) + ); + + entry.setCurrentPersister( entry.getLoadedPersister() ); + entry.setCurrentKey( entry.getLoadedKey() ); + + prepareCollectionForUpdate( coll, entry, session.getEntityMode(), session.getFactory() ); + + } + + /** + * Initialize the role of the collection. + * + * @param collection The collection to be updated by reachibility. + * @param type The type of the collection. + * @param entity The owner of the collection. + * @throws HibernateException + */ + public static void processReachableCollection( + PersistentCollection collection, + CollectionType type, + Object entity, + SessionImplementor session) + throws HibernateException { + + collection.setOwner(entity); + + CollectionEntry ce = session.getPersistenceContext().getCollectionEntry(collection); + + if ( ce == null ) { + // refer to comment in StatefulPersistenceContext.addCollection() + throw new HibernateException( + "Found two representations of same collection: " + + type.getRole() + ); + } + + // The CollectionEntry.isReached() stuff is just to detect any silly users + // who set up circular or shared references between/to collections. + if ( ce.isReached() ) { + // We've been here before + throw new HibernateException( + "Found shared references to a collection: " + + type.getRole() + ); + } + ce.setReached(true); + + SessionFactoryImplementor factory = session.getFactory(); + CollectionPersister persister = factory.getCollectionPersister( type.getRole() ); + ce.setCurrentPersister(persister); + ce.setCurrentKey( type.getKeyOfOwner(entity, session) ); //TODO: better to pass the id in as an argument? + + if ( log.isDebugEnabled() ) { + log.debug( + "Collection found: " + + MessageHelper.collectionInfoString( persister, ce.getCurrentKey(), factory ) + + ", was: " + + MessageHelper.collectionInfoString( ce.getLoadedPersister(), ce.getLoadedKey(), factory ) + + ( collection.wasInitialized() ? " (initialized)" : " (uninitialized)" ) + ); + } + + prepareCollectionForUpdate( collection, ce, session.getEntityMode(), factory ); + + } + + /** + * 1. record the collection role that this collection is referenced by + * 2. 
decide if the collection needs deleting/creating/updating (but + * don't actually schedule the action yet) + */ + private static void prepareCollectionForUpdate( + PersistentCollection collection, + CollectionEntry entry, + EntityMode entityMode, + SessionFactoryImplementor factory) + throws HibernateException { + + if ( entry.isProcessed() ) { + throw new AssertionFailure( "collection was processed twice by flush()" ); + } + entry.setProcessed(true); + + final CollectionPersister loadedPersister = entry.getLoadedPersister(); + final CollectionPersister currentPersister = entry.getCurrentPersister(); + if ( loadedPersister != null || currentPersister != null ) { // it is or was referenced _somewhere_ + + boolean ownerChanged = loadedPersister != currentPersister || // if either its role changed, + !currentPersister + .getKeyType().isEqual( // or its key changed + entry.getLoadedKey(), + entry.getCurrentKey(), + entityMode, factory + ); + + if (ownerChanged) { + + // do a check + final boolean orphanDeleteAndRoleChanged = loadedPersister != null && + currentPersister != null && + loadedPersister.hasOrphanDelete(); + + if (orphanDeleteAndRoleChanged) { + throw new HibernateException( + "Don't change the reference to a collection with cascade=\"all-delete-orphan\": " + + loadedPersister.getRole() + ); + } + + // do the work + if ( currentPersister != null ) { + entry.setDorecreate(true); // we will need to create new entries + } + + if ( loadedPersister != null ) { + entry.setDoremove(true); // we will need to remove ye olde entries + if ( entry.isDorecreate() ) { + log.trace( "Forcing collection initialization" ); + collection.forceInitialization(); // force initialize! + } + } + + } + else if ( collection.isDirty() ) { // else if it's elements changed + entry.setDoupdate(true); + } + + } + + } + +} diff --git a/src/org/hibernate/engine/EntityEntry.java b/src/org/hibernate/engine/EntityEntry.java new file mode 100644 index 0000000000..7b36e29f06 --- /dev/null +++ b/src/org/hibernate/engine/EntityEntry.java @@ -0,0 +1,316 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; + + +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.intercept.FieldInterceptionHelper; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.UniqueKeyLoadable; +import org.hibernate.pretty.MessageHelper; + +/** + * We need an entry to tell us all about the current state + * of an object with respect to its persistent state + * + * @author Gavin King + */ +public final class EntityEntry implements Serializable { + + private LockMode lockMode; + private Status status; + private final Serializable id; + private Object[] loadedState; + private Object[] deletedState; + private boolean existsInDatabase; + private Object version; + private transient EntityPersister persister; // for convenience to save some lookups + private final EntityMode entityMode; + private final String entityName; + private boolean isBeingReplicated; + private boolean loadedWithLazyPropertiesUnfetched; //NOTE: this is not updated when properties are fetched lazily! 
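
The scheduling decision in prepareCollectionForUpdate() above compresses to a small truth table. A standalone sketch; the parameter names are mine, not the patch's (wasReferenced maps to loadedPersister != null, isReferenced to currentPersister != null, sameOwner to an unchanged role and equal key):

    final class CollectionPlanSketch {

        static String plan(boolean wasReferenced, boolean isReferenced,
                           boolean sameOwner, boolean dirty) {
            if (!wasReferenced && !isReferenced) {
                return "nothing";
            }
            if (!sameOwner) {
                StringBuffer p = new StringBuffer();
                if (isReferenced)  p.append("recreate "); // new rows for the new owner
                if (wasReferenced) p.append("remove");    // delete rows for the old owner
                return p.toString().trim();
            }
            return dirty ? "update" : "nothing";
        }
    }
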
+ private final transient Object rowId; + + EntityEntry( + final Status status, + final Object[] loadedState, + final Object rowId, + final Serializable id, + final Object version, + final LockMode lockMode, + final boolean existsInDatabase, + final EntityPersister persister, + final EntityMode entityMode, + final boolean disableVersionIncrement, + final boolean lazyPropertiesAreUnfetched) { + this.status=status; + this.loadedState=loadedState; + this.id=id; + this.rowId=rowId; + this.existsInDatabase=existsInDatabase; + this.version=version; + this.lockMode=lockMode; + this.isBeingReplicated=disableVersionIncrement; + this.loadedWithLazyPropertiesUnfetched = lazyPropertiesAreUnfetched; + this.persister=persister; + this.entityMode = entityMode; + this.entityName = persister == null ? + null : persister.getEntityName(); + } + + /** + * Used during custom deserialization + */ + private EntityEntry( + final SessionFactoryImplementor factory, + final String entityName, + final Serializable id, + final EntityMode entityMode, + final Status status, + final Object[] loadedState, + final Object[] deletedState, + final Object version, + final LockMode lockMode, + final boolean existsInDatabase, + final boolean isBeingReplicated, + final boolean loadedWithLazyPropertiesUnfetched) { + this.entityName = entityName; + this.persister = factory.getEntityPersister( entityName ); + this.id = id; + this.entityMode = entityMode; + this.status = status; + this.loadedState = loadedState; + this.deletedState = deletedState; + this.version = version; + this.lockMode = lockMode; + this.existsInDatabase = existsInDatabase; + this.isBeingReplicated = isBeingReplicated; + this.loadedWithLazyPropertiesUnfetched = loadedWithLazyPropertiesUnfetched; + this.rowId = null; // this is equivalent to the old behavior... 
+ } + + public LockMode getLockMode() { + return lockMode; + } + + public void setLockMode(LockMode lockMode) { + this.lockMode = lockMode; + } + + public Status getStatus() { + return status; + } + + public void setStatus(Status status) { + if (status==Status.READ_ONLY) { + loadedState = null; //memory optimization + } + this.status = status; + } + + public Serializable getId() { + return id; + } + + public Object[] getLoadedState() { + return loadedState; + } + + public Object[] getDeletedState() { + return deletedState; + } + + public void setDeletedState(Object[] deletedState) { + this.deletedState = deletedState; + } + + public boolean isExistsInDatabase() { + return existsInDatabase; + } + + public Object getVersion() { + return version; + } + + public EntityPersister getPersister() { + return persister; + } + + void afterDeserialize(SessionFactoryImplementor factory) { + persister = factory.getEntityPersister( entityName ); + } + + public String getEntityName() { + return entityName; + } + + public boolean isBeingReplicated() { + return isBeingReplicated; + } + + public Object getRowId() { + return rowId; + } + + /** + * After actually updating the database, update the snapshot information, + * and escalate the lock mode + */ + public void postUpdate(Object entity, Object[] updatedState, Object nextVersion) { + this.loadedState = updatedState; + + setLockMode(LockMode.WRITE); + + if ( getPersister().isVersioned() ) { + this.version = nextVersion; + getPersister().setPropertyValue( + entity, + getPersister().getVersionProperty(), + nextVersion, + entityMode + ); + } + + FieldInterceptionHelper.clearDirty( entity ); + } + + /** + * After actually deleting a row, record the fact that the instance no longer + * exists in the database + */ + public void postDelete() { + status = Status.GONE; + existsInDatabase = false; + } + + /** + * After actually inserting a row, record the fact that the instance exists on the + * database (needed for identity-column key generation) + */ + public void postInsert() { + existsInDatabase = true; + } + + public boolean isNullifiable(boolean earlyInsert, SessionImplementor session) { + return getStatus() == Status.SAVING || ( + earlyInsert ? 
+ !isExistsInDatabase() : + session.getPersistenceContext().getNullifiableEntityKeys() + .contains( new EntityKey( getId(), getPersister(), entityMode ) ) + ); + } + + public Object getLoadedValue(String propertyName) { + int propertyIndex = ( (UniqueKeyLoadable) persister ).getPropertyIndex(propertyName); + return loadedState[propertyIndex]; + } + + + public boolean requiresDirtyCheck(Object entity) { + + boolean isMutableInstance = + status != Status.READ_ONLY && + persister.isMutable(); + + return isMutableInstance && ( + getPersister().hasMutableProperties() || + !FieldInterceptionHelper.isInstrumented( entity ) || + FieldInterceptionHelper.extractFieldInterceptor( entity).isDirty() + ); + + } + + public void forceLocked(Object entity, Object nextVersion) { + version = nextVersion; + loadedState[ persister.getVersionProperty() ] = version; + setLockMode( LockMode.FORCE ); + persister.setPropertyValue( + entity, + getPersister().getVersionProperty(), + nextVersion, + entityMode + ); + } + + public void setReadOnly(boolean readOnly, Object entity) { + if (status!=Status.MANAGED && status!=Status.READ_ONLY) { + throw new HibernateException("instance was not in a valid state"); + } + if (readOnly) { + setStatus(Status.READ_ONLY); + loadedState = null; + } + else { + setStatus(Status.MANAGED); + loadedState = getPersister().getPropertyValues(entity, entityMode); + } + } + + public String toString() { + return "EntityEntry" + + MessageHelper.infoString(entityName, id) + + '(' + status + ')'; + } + + public boolean isLoadedWithLazyPropertiesUnfetched() { + return loadedWithLazyPropertiesUnfetched; + } + + + /** + * Custom serialization routine used during serialization of a + * Session/PersistenceContext for increased performance. + * + * @param oos The stream to which we should write the serial data. + * @throws java.io.IOException + */ + void serialize(ObjectOutputStream oos) throws IOException { + oos.writeObject( entityName ); + oos.writeObject( id ); + oos.writeObject( entityMode.toString() ); + oos.writeObject( status.toString() ); + // todo : potentially look at optimizing these two arrays + oos.writeObject( loadedState ); + oos.writeObject( deletedState ); + oos.writeObject( version ); + oos.writeObject( lockMode.toString() ); + oos.writeBoolean( existsInDatabase ); + oos.writeBoolean( isBeingReplicated ); + oos.writeBoolean( loadedWithLazyPropertiesUnfetched ); + } + + /** + * Custom deserialization routine used during deserialization of a + * Session/PersistenceContext for increased performance. + * + * @param ois The stream from which to read the entry. + * @param session The session being deserialized. 
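
EntityEntry's serialize()/deserialize() pair, like the others in this patch, depends on writing and re-reading fields in lockstep order, with enum-like values travelling as their String names. A miniature of that contract ("org.example.Item" is a made-up name):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;

    final class OrderedSerializationSketch {

        static byte[] write(String entityName, boolean exists) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(buf);
            oos.writeObject(entityName);   // field 1
            oos.writeBoolean(exists);      // field 2
            oos.flush();
            return buf.toByteArray();
        }

        static void read(byte[] bytes) throws IOException, ClassNotFoundException {
            ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
            String entityName = (String) ois.readObject();  // field 1 -- same order
            boolean exists = ois.readBoolean();             // field 2
            System.out.println(entityName + " / " + exists);
        }

        public static void main(String[] args) throws Exception {
            read(write("org.example.Item", true));
        }
    }
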
+ * @return The deserialized EntityEntry + * @throws IOException + * @throws ClassNotFoundException + */ + static EntityEntry deserialize( + ObjectInputStream ois, + SessionImplementor session) throws IOException, ClassNotFoundException { + return new EntityEntry( + session.getFactory(), + ( String ) ois.readObject(), + ( Serializable ) ois.readObject(), + EntityMode.parse( ( String ) ois.readObject() ), + Status.parse( ( String ) ois.readObject() ), + ( Object[] ) ois.readObject(), + ( Object[] ) ois.readObject(), + ( Object ) ois.readObject(), + LockMode.parse( ( String ) ois.readObject() ), + ois.readBoolean(), + ois.readBoolean(), + ois.readBoolean() + ); + } +} diff --git a/src/org/hibernate/engine/EntityKey.java b/src/org/hibernate/engine/EntityKey.java new file mode 100644 index 0000000000..6f53528fa7 --- /dev/null +++ b/src/org/hibernate/engine/EntityKey.java @@ -0,0 +1,156 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; + +import org.hibernate.AssertionFailure; +import org.hibernate.EntityMode; +import org.hibernate.util.SerializationHelper; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.Type; + +/** + * Uniquely identifies of an entity instance in a particular session by identifier. + *

    + * Uniqueing information consists of the entity-name and the identifier value. + * + * @see EntityUniqueKey + * @author Gavin King + */ +public final class EntityKey implements Serializable { + private final Serializable identifier; + private final String rootEntityName; + private final String entityName; + private final Type identifierType; + private final boolean isBatchLoadable; + private final SessionFactoryImplementor factory; + private final int hashCode; + private final EntityMode entityMode; + + /** + * Construct a unique identifier for an entity class instance + */ + public EntityKey(Serializable id, EntityPersister persister, EntityMode entityMode) { + if ( id == null ) { + throw new AssertionFailure( "null identifier" ); + } + this.identifier = id; + this.entityMode = entityMode; + this.rootEntityName = persister.getRootEntityName(); + this.entityName = persister.getEntityName(); + this.identifierType = persister.getIdentifierType(); + this.isBatchLoadable = persister.isBatchLoadable(); + this.factory = persister.getFactory(); + hashCode = generateHashCode(); //cache the hashcode + } + + /** + * Used to reconstruct an EntityKey during deserialization. + * + * @param identifier The identifier value + * @param rootEntityName The root entity name + * @param entityName The specific entity name + * @param identifierType The type of the identifier value + * @param batchLoadable Whether represented entity is eligible for batch loading + * @param factory The session factory + * @param entityMode The entity's entity mode + */ + private EntityKey( + Serializable identifier, + String rootEntityName, + String entityName, + Type identifierType, + boolean batchLoadable, + SessionFactoryImplementor factory, + EntityMode entityMode) { + this.identifier = identifier; + this.rootEntityName = rootEntityName; + this.entityName = entityName; + this.identifierType = identifierType; + this.isBatchLoadable = batchLoadable; + this.factory = factory; + this.entityMode = entityMode; + this.hashCode = generateHashCode(); + } + + public boolean isBatchLoadable() { + return isBatchLoadable; + } + + /** + * Get the user-visible identifier + */ + public Serializable getIdentifier() { + return identifier; + } + + public String getEntityName() { + return entityName; + } + + public boolean equals(Object other) { + EntityKey otherKey = (EntityKey) other; + return otherKey.rootEntityName.equals(this.rootEntityName) && + identifierType.isEqual(otherKey.identifier, this.identifier, entityMode, factory); + } + + private int generateHashCode() { + int result = 17; + result = 37 * result + rootEntityName.hashCode(); + result = 37 * result + identifierType.getHashCode( identifier, entityMode, factory ); + return result; + } + + public int hashCode() { + return hashCode; + } + + public String toString() { + return "EntityKey" + + MessageHelper.infoString( factory.getEntityPersister( entityName ), identifier, factory ); + } + + /** + * Custom serialization routine used during serialization of a + * Session/PersistenceContext for increased performance. + * + * @param oos The stream to which we should write the serial data. 
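
Worth noting in EntityKey#equals above: it compares the root entity name, so keys built from a subclass persister and its superclass persister with the same identifier compare equal, matching one-row-per-hierarchy identity. A hypothetical illustration (Cat/Animal and the persisters are invented for the example):

    //   EntityKey a = new EntityKey( id, catPersister, EntityMode.POJO );    // Cat extends Animal
    //   EntityKey b = new EntityKey( id, animalPersister, EntityMode.POJO );
    //   a.equals( b );   // true: both root to "Animal" and share the identifier
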
+ * @throws IOException + */ + void serialize(ObjectOutputStream oos) throws IOException { + oos.writeObject( identifier ); + oos.writeObject( rootEntityName ); + oos.writeObject( entityName ); + oos.writeObject( identifierType ); + oos.writeBoolean( isBatchLoadable ); + oos.writeObject( entityMode.toString() ); + } + + /** + * Custom deserialization routine used during deserialization of a + * Session/PersistenceContext for increased performance. + * + * @param ois The stream from which to read the entry. + * @param session The session being deserialized. + * @return The deserialized EntityEntry + * @throws IOException + * @throws ClassNotFoundException + */ + static EntityKey deserialize( + ObjectInputStream ois, + SessionImplementor session) throws IOException, ClassNotFoundException { + return new EntityKey( + ( Serializable ) ois.readObject(), + ( String ) ois.readObject(), + ( String ) ois.readObject(), + ( Type ) ois.readObject(), + ois.readBoolean(), + session.getFactory(), + EntityMode.parse( ( String ) ois.readObject() ) + ); + } +} diff --git a/src/org/hibernate/engine/EntityUniqueKey.java b/src/org/hibernate/engine/EntityUniqueKey.java new file mode 100755 index 0000000000..7cf419dfd1 --- /dev/null +++ b/src/org/hibernate/engine/EntityUniqueKey.java @@ -0,0 +1,137 @@ +//$Id$ +package org.hibernate.engine; + +import org.hibernate.EntityMode; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.Type; + +import java.io.Serializable; +import java.io.ObjectOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; + +/** + * Used to uniquely key an entity instance in relation to a particular session + * by some unique property reference, as opposed to identifier. + *

    + * Uniqueing information consists of the entity-name, the referenced + * property name, and the referenced property value. + * + * @see EntityKey + * @author Gavin King + */ +public class EntityUniqueKey implements Serializable { + private final String uniqueKeyName; + private final String entityName; + private final Object key; + private final Type keyType; + private final EntityMode entityMode; + private final int hashCode; + + public EntityUniqueKey( + final String entityName, + final String uniqueKeyName, + final Object semiResolvedKey, + final Type keyType, + final EntityMode entityMode, + final SessionFactoryImplementor factory + ) { + this.uniqueKeyName = uniqueKeyName; + this.entityName = entityName; + this.key = semiResolvedKey; + this.keyType = keyType.getSemiResolvedType(factory); + this.entityMode = entityMode; + this.hashCode = generateHashCode(factory); + } + + public String getEntityName() { + return entityName; + } + + public Object getKey() { + return key; + } + + public String getUniqueKeyName() { + return uniqueKeyName; + } + + public int generateHashCode(SessionFactoryImplementor factory) { + int result = 17; + result = 37 * result + entityName.hashCode(); + result = 37 * result + uniqueKeyName.hashCode(); + result = 37 * result + keyType.getHashCode(key, entityMode, factory); + return result; + } + + public int hashCode() { + return hashCode; + } + + public boolean equals(Object other) { + EntityUniqueKey that = (EntityUniqueKey) other; + return that.entityName.equals(entityName) && + that.uniqueKeyName.equals(uniqueKeyName) && + keyType.isEqual(that.key, key, entityMode); + } + + public String toString() { + return "EntityUniqueKey" + MessageHelper.infoString(entityName, uniqueKeyName, key); + } + + private void writeObject(ObjectOutputStream oos) throws IOException { + checkAbilityToSerialize(); + oos.defaultWriteObject(); + } + + private void checkAbilityToSerialize() { + // The unique property value represented here may or may not be + // serializable, so we do an explicit check here in order to generate + // a better error message + if ( key != null && ! Serializable.class.isAssignableFrom( key.getClass() ) ) { + throw new IllegalStateException( + "Cannot serialize an EntityUniqueKey which represents a non " + + "serializable property value [" + entityName + "." + uniqueKeyName + "]" + ); + } + } + + /** + * Custom serialization routine used during serialization of a + * Session/PersistenceContext for increased performance. + * + * @param oos The stream to which we should write the serial data. + * @throws IOException + */ + void serialize(ObjectOutputStream oos) throws IOException { + checkAbilityToSerialize(); + oos.writeObject( uniqueKeyName ); + oos.writeObject( entityName ); + oos.writeObject( key ); + oos.writeObject( keyType ); + oos.writeObject( entityMode ); + } + + /** + * Custom deserialization routine used during deserialization of a + * Session/PersistenceContext for increased performance. + * + * @param ois The stream from which to read the entry. + * @param session The session being deserialized. 
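
The checkAbilityToSerialize() guard above front-loads a pointed error message instead of the generic NotSerializableException that would otherwise surface mid-stream. The probe it applies, reduced to a predicate (null keys are tolerated, matching the patch):

    static boolean isSerializableKey(Object key) {
        return key == null || key instanceof java.io.Serializable;
    }
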
+ * @return The deserialized EntityUniqueKey
+ * @throws IOException
+ * @throws ClassNotFoundException
+ */
+ static EntityUniqueKey deserialize(
+ ObjectInputStream ois,
+ SessionImplementor session) throws IOException, ClassNotFoundException {
+ return new EntityUniqueKey(
+ ( String ) ois.readObject(),
+ ( String ) ois.readObject(),
+ ois.readObject(),
+ ( Type ) ois.readObject(),
+ ( EntityMode ) ois.readObject(),
+ session.getFactory()
+ );
+ }
+}
diff --git a/src/org/hibernate/engine/ExecuteUpdateResultCheckStyle.java b/src/org/hibernate/engine/ExecuteUpdateResultCheckStyle.java
new file mode 100644
index 0000000000..1c6bc15f91
--- /dev/null
+++ b/src/org/hibernate/engine/ExecuteUpdateResultCheckStyle.java
@@ -0,0 +1,73 @@
+package org.hibernate.engine;
+
+import java.io.Serializable;
+import java.io.ObjectStreamException;
+import java.io.InvalidObjectException;
+
+/**
+ * For persistence operations (INSERT, UPDATE, DELETE), defines the style used
+ * to determine results (success/failure).
+ *
+ * @author Steve Ebersole
+ */
+public class ExecuteUpdateResultCheckStyle implements Serializable {
+ /**
+ * Do not perform checking. Either the user simply does not want checking, or is
+ * indicating a {@link java.sql.CallableStatement} execution in which the
+ * checks are being performed explicitly and failures are handled through
+ * propagation of {@link java.sql.SQLException}s.
+ */
+ public static final ExecuteUpdateResultCheckStyle NONE = new ExecuteUpdateResultCheckStyle( "none" );
+ /**
+ * Perform row-count checking. Row counts are the int values returned by both
+ * {@link java.sql.PreparedStatement#executeUpdate()} and
+ * {@link java.sql.Statement#executeBatch()}. These values are checked
+ * against some expected count.
+ */
+ public static final ExecuteUpdateResultCheckStyle COUNT = new ExecuteUpdateResultCheckStyle( "rowcount" );
+ /**
+ * Essentially the same as {@link #COUNT} except that the row count actually
+ * comes from an output parameter registered as part of a
+ * {@link java.sql.CallableStatement}. This style explicitly prohibits
+ * statement batching from being used...
+ */
+ public static final ExecuteUpdateResultCheckStyle PARAM = new ExecuteUpdateResultCheckStyle( "param" );
+
+ private final String name;
+
+ private ExecuteUpdateResultCheckStyle(String name) {
+ this.name = name;
+ }
+
+ private Object readResolve() throws ObjectStreamException {
+ Object resolved = parse( name );
+ if ( resolved == null ) {
+ throw new InvalidObjectException( "unknown result style [" + name + "]" );
+ }
+ return resolved;
+ }
+
+ public static ExecuteUpdateResultCheckStyle parse(String name) {
+ if ( name.equals( NONE.name ) ) {
+ return NONE;
+ }
+ else if ( name.equals( COUNT.name ) ) {
+ return COUNT;
+ }
+ else if ( name.equals( PARAM.name ) ) {
+ return PARAM;
+ }
+ else {
+ return null;
+ }
+ }
+
+ public static ExecuteUpdateResultCheckStyle determineDefault(String customSql, boolean callable) {
+ if ( customSql == null ) {
+ return COUNT;
+ }
+ else {
+ return callable ? PARAM : COUNT;
+ }
+ }
+}
diff --git a/src/org/hibernate/engine/FilterDefinition.java b/src/org/hibernate/engine/FilterDefinition.java
new file mode 100644
index 0000000000..0585274a36
--- /dev/null
+++ b/src/org/hibernate/engine/FilterDefinition.java
@@ -0,0 +1,69 @@
+// $Id$
+package org.hibernate.engine;
+
+import org.hibernate.type.Type;
+
+import java.io.Serializable;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Set;
+
+/**
+ * A FilterDefinition defines the global attributes of a dynamic filter. This
+ * information includes its name as well as its defined parameters (name and type).
+ *
+ * @author Steve Ebersole
+ */
+public class FilterDefinition implements Serializable {
+ private final String filterName;
+ private final String defaultFilterCondition;
+ private final Map parameterTypes = new HashMap();
+
+ /**
+ * Construct a new FilterDefinition instance.
+ *
+ * @param name The name of the filter for which this configuration is in effect.
+ */
+ public FilterDefinition(String name, String defaultCondition, Map parameterTypes) {
+ this.filterName = name;
+ this.defaultFilterCondition = defaultCondition;
+ this.parameterTypes.putAll( parameterTypes );
+ }
+
+ /**
+ * Get the name of the filter this configuration defines.
+ *
+ * @return The filter name for this configuration.
+ */
+ public String getFilterName() {
+ return filterName;
+ }
+
+ /**
+ * Get a set of the parameters defined by this configuration.
+ *
+ * @return The parameters named by this configuration.
+ */
+ public Set getParameterNames() {
+ return parameterTypes.keySet();
+ }
+
+ /**
+ * Retrieve the type of the named parameter defined for this filter.
+ *
+ * @param parameterName The name of the filter parameter for which to return the type.
+ * @return The type of the named parameter.
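
ExecuteUpdateResultCheckStyle above is a pre-Java-5 typesafe enum; its readResolve() is what keeps == comparisons valid across serialization by collapsing deserialized copies back to the canonical constants. The pattern in miniature (the Mode class is invented for illustration):

    import java.io.InvalidObjectException;
    import java.io.ObjectStreamException;
    import java.io.Serializable;

    final class Mode implements Serializable {
        static final Mode ON  = new Mode("on");
        static final Mode OFF = new Mode("off");

        private final String name;

        private Mode(String name) {
            this.name = name;
        }

        static Mode parse(String name) {
            if (ON.name.equals(name))  return ON;
            if (OFF.name.equals(name)) return OFF;
            return null;
        }

        // Deserialization hands back the shared constant, never a fresh copy.
        private Object readResolve() throws ObjectStreamException {
            Object resolved = parse(name);
            if (resolved == null) {
                throw new InvalidObjectException("unknown mode [" + name + "]");
            }
            return resolved;
        }
    }
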
+ */ + public Type getParameterType(String parameterName) { + return (Type) parameterTypes.get(parameterName); + } + + public String getDefaultFilterCondition() { + return defaultFilterCondition; + } + + public Map getParameterTypes() { + return parameterTypes; + } + +} diff --git a/src/org/hibernate/engine/ForeignKeys.java b/src/org/hibernate/engine/ForeignKeys.java new file mode 100755 index 0000000000..735da3796d --- /dev/null +++ b/src/org/hibernate/engine/ForeignKeys.java @@ -0,0 +1,230 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.TransientObjectException; +import org.hibernate.intercept.LazyPropertyInitializer; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; +import org.hibernate.type.AbstractComponentType; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; + +/** + * Algorithms related to foreign key constraint transparency + * + * @author Gavin King + */ +public final class ForeignKeys { + + private ForeignKeys() {} + + public static class Nullifier { + + private final boolean isDelete; + private final boolean isEarlyInsert; + private final SessionImplementor session; + private final Object self; + + public Nullifier(Object self, boolean isDelete, boolean isEarlyInsert, SessionImplementor session) { + this.isDelete = isDelete; + this.isEarlyInsert = isEarlyInsert; + this.session = session; + this.self = self; + } + + /** + * Nullify all references to entities that have not yet + * been inserted in the database, where the foreign key + * points toward that entity + */ + public void nullifyTransientReferences(final Object[] values, final Type[] types) + throws HibernateException { + for ( int i = 0; i < types.length; i++ ) { + values[i] = nullifyTransientReferences( values[i], types[i] ); + } + } + + /** + * Return null if the argument is an "unsaved" entity (ie. + * one with no existing database row), or the input argument + * otherwise. This is how Hibernate avoids foreign key constraint + * violations. + */ + private Object nullifyTransientReferences(final Object value, final Type type) + throws HibernateException { + if ( value == null ) { + return null; + } + else if ( type.isEntityType() ) { + EntityType entityType = (EntityType) type; + if ( entityType.isOneToOne() ) { + return value; + } + else { + String entityName = entityType.getAssociatedEntityName(); + return isNullifiable(entityName, value) ? null : value; + } + } + else if ( type.isAnyType() ) { + return isNullifiable(null, value) ? 
null : value; + } + else if ( type.isComponentType() ) { + AbstractComponentType actype = (AbstractComponentType) type; + Object[] subvalues = actype.getPropertyValues(value, session); + Type[] subtypes = actype.getSubtypes(); + boolean substitute = false; + for ( int i = 0; i < subvalues.length; i++ ) { + Object replacement = nullifyTransientReferences( subvalues[i], subtypes[i] ); + if ( replacement != subvalues[i] ) { + substitute = true; + subvalues[i] = replacement; + } + } + if (substitute) actype.setPropertyValues( value, subvalues, session.getEntityMode() ); + return value; + } + else { + return value; + } + } + + /** + * Determine if the object already exists in the database, + * using a "best guess" + */ + private boolean isNullifiable(final String entityName, Object object) + throws HibernateException { + + if (object==LazyPropertyInitializer.UNFETCHED_PROPERTY) return false; //this is kinda the best we can do... + + if ( object instanceof HibernateProxy ) { + // if its an uninitialized proxy it can't be transient + LazyInitializer li = ( (HibernateProxy) object ).getHibernateLazyInitializer(); + if ( li.getImplementation(session) == null ) { + return false; + // ie. we never have to null out a reference to + // an uninitialized proxy + } + else { + //unwrap it + object = li.getImplementation(); + } + } + + // if it was a reference to self, don't need to nullify + // unless we are using native id generation, in which + // case we definitely need to nullify + if ( object == self ) { + return isEarlyInsert || ( + isDelete && + session.getFactory() + .getDialect() + .hasSelfReferentialForeignKeyBug() + ); + } + + // See if the entity is already bound to this session, if not look at the + // entity identifier and assume that the entity is persistent if the + // id is not "unsaved" (that is, we rely on foreign keys to keep + // database integrity) + + EntityEntry entityEntry = session.getPersistenceContext().getEntry(object); + if ( entityEntry==null ) { + return isTransient(entityName, object, null, session); + } + else { + return entityEntry.isNullifiable(isEarlyInsert, session); + } + + } + + } + + /** + * Is this instance persistent or detached? + * If assumed is non-null, don't hit the database to make the + * determination, instead assume that value; the client code must be + * prepared to "recover" in the case that this assumed result is incorrect. + */ + public static boolean isNotTransient(String entityName, Object entity, Boolean assumed, SessionImplementor session) + throws HibernateException { + if (entity instanceof HibernateProxy) return true; + if ( session.getPersistenceContext().isEntryFor(entity) ) return true; + return !isTransient(entityName, entity, assumed, session); + } + + /** + * Is this instance, which we know is not persistent, actually transient? + * If assumed is non-null, don't hit the database to make the + * determination, instead assume that value; the client code must be + * prepared to "recover" in the case that this assumed result is incorrect. 
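
The isTransient() method, whose body follows, resolves through a chain of opinions: interceptor, then persister, then the caller's assumption, and only as a last resort the database. Condensed sketch using tri-state Booleans, where null means "no opinion":

    static boolean isTransientSketch(Boolean interceptorSays, Boolean persisterSays,
                                     Boolean assumed, boolean snapshotExists) {
        if (interceptorSays != null) return interceptorSays.booleanValue();
        if (persisterSays != null)   return persisterSays.booleanValue();
        if (assumed != null)         return assumed.booleanValue();
        return !snapshotExists;      // last resort: hit the database snapshot
    }
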
+	/**
+	 * Is this instance, which we know is not persistent, actually transient?
+	 * If assumed is non-null, don't hit the database to make the
+	 * determination, instead assume that value; the client code must be
+	 * prepared to "recover" in the case that this assumed result is incorrect.
+	 */
+	public static boolean isTransient(String entityName, Object entity, Boolean assumed, SessionImplementor session)
+	throws HibernateException {
+
+		if (entity==LazyPropertyInitializer.UNFETCHED_PROPERTY) {
+			// an unfetched association can only point to
+			// an entity that already exists in the db
+			return false;
+		}
+
+		// let the interceptor inspect the instance to decide
+		Boolean isUnsaved = session.getInterceptor().isTransient(entity);
+		if (isUnsaved!=null) return isUnsaved.booleanValue();
+
+		// let the persister inspect the instance to decide
+		EntityPersister persister = session.getEntityPersister(entityName, entity);
+		isUnsaved = persister.isTransient(entity, session);
+		if (isUnsaved!=null) return isUnsaved.booleanValue();
+
+		// we use the assumed value, if there is one, to avoid hitting
+		// the database
+		if (assumed!=null) return assumed.booleanValue();
+
+		// hit the database, after checking the session cache for a snapshot
+		Object[] snapshot = session.getPersistenceContext()
+			.getDatabaseSnapshot( persister.getIdentifier( entity, session.getEntityMode() ), persister );
+		return snapshot==null;
+
+	}
+
+	/**
+	 * Return the identifier of the persistent or transient object, or throw
+	 * an exception if the instance is "unsaved"
+	 *
+	 * Used by OneToOneType and ManyToOneType to determine what id value should
+	 * be used for an object that may or may not be associated with the session.
+	 * This does a "best guess" using any/all info available to us (not just the
+	 * EntityEntry).
+	 */
+	public static Serializable getEntityIdentifierIfNotUnsaved(
+			final String entityName,
+			final Object object,
+			final SessionImplementor session)
+	throws HibernateException {
+		if ( object == null ) {
+			return null;
+		}
+		else {
+			Serializable id = session.getContextEntityIdentifier( object );
+			if ( id == null ) {
+				// context-entity-identifier returns null explicitly if the entity
+				// is not associated with the persistence context; so make some
+				// deeper checks...
+				if ( isTransient(entityName, object, Boolean.FALSE, session) ) {
+					throw new TransientObjectException(
+						"object references an unsaved transient instance - save the transient instance before flushing: " +
+						(entityName == null ?
session.guessEntityName( object ) : entityName) + ); + } + id = session.getEntityPersister( entityName, object ).getIdentifier( object, session.getEntityMode() ); + } + return id; + } + } + +} diff --git a/src/org/hibernate/engine/HibernateIterator.java b/src/org/hibernate/engine/HibernateIterator.java new file mode 100644 index 0000000000..36759863e6 --- /dev/null +++ b/src/org/hibernate/engine/HibernateIterator.java @@ -0,0 +1,15 @@ +//$Id$ +package org.hibernate.engine; + +import org.hibernate.JDBCException; + +import java.util.Iterator; + +/** + * An iterator that may be "closed" + * @see org.hibernate.Hibernate#close(java.util.Iterator) + * @author Gavin King + */ +public interface HibernateIterator extends Iterator { + public void close() throws JDBCException; +} diff --git a/src/org/hibernate/engine/IdentifierValue.java b/src/org/hibernate/engine/IdentifierValue.java new file mode 100755 index 0000000000..4944f860ff --- /dev/null +++ b/src/org/hibernate/engine/IdentifierValue.java @@ -0,0 +1,115 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * A strategy for determining if an identifier value is an identifier of + * a new transient instance or a previously persistent transient instance. + * The strategy is determined by the unsaved-value attribute in + * the mapping file. + * + * @author Gavin King + */ +public class IdentifierValue { + + private static final Log log = LogFactory.getLog(IdentifierValue.class); + + private final Serializable value; + + /** + * Always assume the transient instance is newly instantiated + */ + public static final IdentifierValue ANY = new IdentifierValue() { + public final Boolean isUnsaved(Serializable id) { + log.trace("id unsaved-value strategy ANY"); + return Boolean.TRUE; + } + public Serializable getDefaultValue(Serializable currentValue) { + return currentValue; + } + public String toString() { + return "SAVE_ANY"; + } + }; + + /** + * Never assume the transient instance is newly instantiated + */ + public static final IdentifierValue NONE = new IdentifierValue() { + public final Boolean isUnsaved(Serializable id) { + log.trace("id unsaved-value strategy NONE"); + return Boolean.FALSE; + } + public Serializable getDefaultValue(Serializable currentValue) { + return currentValue; + } + public String toString() { + return "SAVE_NONE"; + } + }; + + /** + * Assume the transient instance is newly instantiated if the identifier + * is null. + */ + public static final IdentifierValue NULL = new IdentifierValue() { + public final Boolean isUnsaved(Serializable id) { + log.trace("id unsaved-value strategy NULL"); + return id==null ? Boolean.TRUE : Boolean.FALSE; + } + public Serializable getDefaultValue(Serializable currentValue) { + return null; + } + public String toString() { + return "SAVE_NULL"; + } + }; + + /** + * Assume nothing. 
+ */ + public static final IdentifierValue UNDEFINED = new IdentifierValue() { + public final Boolean isUnsaved(Serializable id) { + log.trace("id unsaved-value strategy UNDEFINED"); + return null; + } + public Serializable getDefaultValue(Serializable currentValue) { + return null; + } + public String toString() { + return "UNDEFINED"; + } + }; + + protected IdentifierValue() { + this.value = null; + } + + /** + * Assume the transient instance is newly instantiated if + * its identifier is null or equal to value + */ + public IdentifierValue(Serializable value) { + this.value = value; + } + + /** + * Does the given identifier belong to a new instance? + */ + public Boolean isUnsaved(Serializable id) { + if ( log.isTraceEnabled() ) log.trace("id unsaved-value: " + value); + return id==null || id.equals(value) ? Boolean.TRUE : Boolean.FALSE; + } + + public Serializable getDefaultValue(Serializable currentValue) { + return value; + } + + public String toString() { + return "identifier unsaved-value: " + value; + } +} \ No newline at end of file diff --git a/src/org/hibernate/engine/JoinHelper.java b/src/org/hibernate/engine/JoinHelper.java new file mode 100755 index 0000000000..e7b753c9a2 --- /dev/null +++ b/src/org/hibernate/engine/JoinHelper.java @@ -0,0 +1,157 @@ +//$Id$ +package org.hibernate.engine; + +import org.hibernate.persister.entity.Joinable; +import org.hibernate.persister.entity.OuterJoinLoadable; +import org.hibernate.persister.entity.PropertyMapping; +import org.hibernate.type.AssociationType; +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.StringHelper; + +/** + * @author Gavin King + */ +public final class JoinHelper { + + private JoinHelper() {} + + /** + * Get the aliased columns of the owning entity which are to + * be used in the join + */ + public static String[] getAliasedLHSColumnNames( + AssociationType type, + String alias, + int property, + OuterJoinLoadable lhsPersister, + Mapping mapping + ) { + return getAliasedLHSColumnNames(type, alias, property, 0, lhsPersister, mapping); + } + + /** + * Get the columns of the owning entity which are to + * be used in the join + */ + public static String[] getLHSColumnNames( + AssociationType type, + int property, + OuterJoinLoadable lhsPersister, + Mapping mapping + ) { + return getLHSColumnNames(type, property, 0, lhsPersister, mapping); + } + + /** + * Get the aliased columns of the owning entity which are to + * be used in the join + */ + public static String[] getAliasedLHSColumnNames( + AssociationType type, + String alias, + int property, + int begin, + OuterJoinLoadable lhsPersister, + Mapping mapping + ) { + if ( type.useLHSPrimaryKey() ) { + return StringHelper.qualify( alias, lhsPersister.getIdentifierColumnNames() ); + } + else { + String propertyName = type.getLHSPropertyName(); + if (propertyName==null) { + return ArrayHelper.slice( + lhsPersister.toColumns(alias, property), + begin, + type.getColumnSpan(mapping) + ); + } + else { + return ( (PropertyMapping) lhsPersister ).toColumns(alias, propertyName); //bad cast + } + } + } + + /** + * Get the columns of the owning entity which are to + * be used in the join + */ + public static String[] getLHSColumnNames( + AssociationType type, + int property, + int begin, + OuterJoinLoadable lhsPersister, + Mapping mapping + ) { + if ( type.useLHSPrimaryKey() ) { + //return lhsPersister.getSubclassPropertyColumnNames(property); + return lhsPersister.getIdentifierColumnNames(); + } + else { + String propertyName = type.getLHSPropertyName(); + if 
(propertyName==null) { + //slice, to get the columns for this component + //property + return ArrayHelper.slice( + lhsPersister.getSubclassPropertyColumnNames(property), + begin, + type.getColumnSpan(mapping) + ); + } + else { + //property-refs for associations defined on a + //component are not supported, so no need to slice + return lhsPersister.getPropertyColumnNames(propertyName); + } + } + } + + public static String getLHSTableName( + AssociationType type, + int property, + OuterJoinLoadable lhsPersister + ) { + if ( type.useLHSPrimaryKey() ) { + return lhsPersister.getTableName(); + } + else { + String propertyName = type.getLHSPropertyName(); + if (propertyName==null) { + //if there is no property-ref, assume the join + //is to the subclass table (ie. the table of the + //subclass that the association belongs to) + return lhsPersister.getSubclassPropertyTableName(property); + } + else { + //handle a property-ref + String propertyRefTable = lhsPersister.getPropertyTableName(propertyName); + if (propertyRefTable==null) { + //it is possible that the tree-walking in OuterJoinLoader can get to + //an association defined by a subclass, in which case the property-ref + //might refer to a property defined on a subclass of the current class + //in this case, the table name is not known - this temporary solution + //assumes that the property-ref refers to a property of the subclass + //table that the association belongs to (a reasonable guess) + //TODO: fix this, add: OuterJoinLoadable.getSubclassPropertyTableName(String propertyName) + propertyRefTable = lhsPersister.getSubclassPropertyTableName(property); + } + return propertyRefTable; + } + } + } + + /** + * Get the columns of the associated table which are to + * be used in the join + */ + public static String[] getRHSColumnNames(AssociationType type, SessionFactoryImplementor factory) { + String uniqueKeyPropertyName = type.getRHSUniqueKeyPropertyName(); + Joinable joinable = type.getAssociatedJoinable(factory); + if (uniqueKeyPropertyName==null) { + return joinable.getKeyColumnNames(); + } + else { + return ( (OuterJoinLoadable) joinable ).getPropertyColumnNames(uniqueKeyPropertyName); + } + } +} diff --git a/src/org/hibernate/engine/JoinSequence.java b/src/org/hibernate/engine/JoinSequence.java new file mode 100644 index 0000000000..34244d8e2e --- /dev/null +++ b/src/org/hibernate/engine/JoinSequence.java @@ -0,0 +1,267 @@ +//$Id$ +package org.hibernate.engine; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.hibernate.MappingException; +import org.hibernate.persister.entity.Joinable; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.sql.JoinFragment; +import org.hibernate.sql.QueryJoinFragment; +import org.hibernate.type.AssociationType; +import org.hibernate.util.CollectionHelper; + +/** + * @author Gavin King + */ +public class JoinSequence { + + private final SessionFactoryImplementor factory; + private final List joins = new ArrayList(); + private boolean useThetaStyle = false; + private final StringBuffer conditions = new StringBuffer(); + private String rootAlias; + private Joinable rootJoinable; + private Selector selector; + private JoinSequence next; + private boolean isFromPart = false; + + public String toString() { + StringBuffer buf = new StringBuffer(); + buf.append( "JoinSequence{" ); + if ( rootJoinable != null ) { + buf.append( rootJoinable ) + .append( '[' ) + .append( rootAlias ) + .append( ']' ); + } + for ( int i = 0; i < joins.size(); 
i++ ) { + buf.append( "->" ).append( joins.get( i ) ); + } + return buf.append( '}' ).toString(); + } + + final class Join { + + private final AssociationType associationType; + private final Joinable joinable; + private final int joinType; + private final String alias; + private final String[] lhsColumns; + + Join(AssociationType associationType, String alias, int joinType, String[] lhsColumns) + throws MappingException { + this.associationType = associationType; + this.joinable = associationType.getAssociatedJoinable( factory ); + this.alias = alias; + this.joinType = joinType; + this.lhsColumns = lhsColumns; + } + + String getAlias() { + return alias; + } + + AssociationType getAssociationType() { + return associationType; + } + + Joinable getJoinable() { + return joinable; + } + + int getJoinType() { + return joinType; + } + + String[] getLHSColumns() { + return lhsColumns; + } + + public String toString() { + return joinable.toString() + '[' + alias + ']'; + } + } + + public JoinSequence(SessionFactoryImplementor factory) { + this.factory = factory; + } + + public JoinSequence getFromPart() { + JoinSequence fromPart = new JoinSequence( factory ); + fromPart.joins.addAll( this.joins ); + fromPart.useThetaStyle = this.useThetaStyle; + fromPart.rootAlias = this.rootAlias; + fromPart.rootJoinable = this.rootJoinable; + fromPart.selector = this.selector; + fromPart.next = this.next == null ? null : this.next.getFromPart(); + fromPart.isFromPart = true; + return fromPart; + } + + public JoinSequence copy() { + JoinSequence copy = new JoinSequence( factory ); + copy.joins.addAll( this.joins ); + copy.useThetaStyle = this.useThetaStyle; + copy.rootAlias = this.rootAlias; + copy.rootJoinable = this.rootJoinable; + copy.selector = this.selector; + copy.next = this.next == null ? null : this.next.copy(); + copy.isFromPart = this.isFromPart; + copy.conditions.append( this.conditions.toString() ); + return copy; + } + + public JoinSequence addJoin(AssociationType associationType, String alias, int joinType, String[] referencingKey) + throws MappingException { + joins.add( new Join( associationType, alias, joinType, referencingKey ) ); + return this; + } + + public JoinFragment toJoinFragment() throws MappingException { + return toJoinFragment( CollectionHelper.EMPTY_MAP, true ); + } + + public JoinFragment toJoinFragment(Map enabledFilters, boolean includeExtraJoins) throws MappingException { + return toJoinFragment( enabledFilters, includeExtraJoins, null, null ); + } + + public JoinFragment toJoinFragment( + Map enabledFilters, + boolean includeExtraJoins, + String withClauseFragment, + String withClauseJoinAlias) throws MappingException { + QueryJoinFragment joinFragment = new QueryJoinFragment( factory.getDialect(), useThetaStyle ); + if ( rootJoinable != null ) { + joinFragment.addCrossJoin( rootJoinable.getTableName(), rootAlias ); + String filterCondition = rootJoinable.filterFragment( rootAlias, enabledFilters ); + // JoinProcessor needs to know if the where clause fragment came from a dynamic filter or not so it + // can put the where clause fragment in the right place in the SQL AST. 'hasFilterCondition' keeps track + // of that fact. + joinFragment.setHasFilterCondition( joinFragment.addCondition( filterCondition ) ); + if (includeExtraJoins) { //TODO: not quite sure about the full implications of this! 
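
	// For illustration, a sketch of how a sequence is typically assembled and
	// rendered (the persister, association type and column names are assumed to
	// come from the mapping; the aliases are arbitrary):
	//
	//     JoinSequence seq = new JoinSequence( factory )
	//         .setRoot( personPersister, "p" )
	//         .addJoin( addressType, "a", JoinFragment.INNER_JOIN, lhsColumns );
	//     JoinFragment fragment = seq.toJoinFragment();
	//
	// In ANSI-join mode this yields a from-clause fragment shaped roughly like
	// "person p inner join address a on p.address_id=a.id"; after
	// setUseThetaStyle(true) the tables are instead cross-joined and the join
	// condition is emitted into the where-clause fragment.
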
+ addExtraJoins( joinFragment, rootAlias, rootJoinable, true ); + } + } + + Joinable last = rootJoinable; + + for ( int i = 0; i < joins.size(); i++ ) { + Join join = ( Join ) joins.get( i ); + String on = join.getAssociationType().getOnCondition( join.getAlias(), factory, enabledFilters ); + String condition = null; + if ( last != null && + isManyToManyRoot( last ) && + ( ( QueryableCollection ) last ).getElementType() == join.getAssociationType() ) { + // the current join represents the join between a many-to-many association table + // and its "target" table. Here we need to apply any additional filters + // defined specifically on the many-to-many + String manyToManyFilter = ( ( QueryableCollection ) last ) + .getManyToManyFilterFragment( join.getAlias(), enabledFilters ); + condition = "".equals( manyToManyFilter ) + ? on + : "".equals( on ) + ? manyToManyFilter + : on + " and " + manyToManyFilter; + } + else { + condition = on; + } + if ( withClauseFragment != null ) { + if ( join.getAlias().equals( withClauseJoinAlias ) ) { + condition += " and " + withClauseFragment; + } + } + joinFragment.addJoin( + join.getJoinable().getTableName(), + join.getAlias(), + join.getLHSColumns(), + JoinHelper.getRHSColumnNames( join.getAssociationType(), factory ), + join.joinType, + condition + ); + if (includeExtraJoins) { //TODO: not quite sure about the full implications of this! + addExtraJoins( joinFragment, join.getAlias(), join.getJoinable(), join.joinType == JoinFragment.INNER_JOIN ); + } + last = join.getJoinable(); + } + if ( next != null ) { + joinFragment.addFragment( next.toJoinFragment( enabledFilters, includeExtraJoins ) ); + } + joinFragment.addCondition( conditions.toString() ); + if ( isFromPart ) joinFragment.clearWherePart(); + return joinFragment; + } + + private boolean isManyToManyRoot(Joinable joinable) { + if ( joinable != null && joinable.isCollection() ) { + QueryableCollection persister = ( QueryableCollection ) joinable; + return persister.isManyToMany(); + } + return false; + } + + private boolean isIncluded(String alias) { + return selector != null && selector.includeSubclasses( alias ); + } + + private void addExtraJoins(JoinFragment joinFragment, String alias, Joinable joinable, boolean innerJoin) { + boolean include = isIncluded( alias ); + joinFragment.addJoins( joinable.fromJoinFragment( alias, innerJoin, include ), + joinable.whereJoinFragment( alias, innerJoin, include ) ); + } + + public JoinSequence addCondition(String condition) { + if ( condition.trim().length() != 0 ) { + if ( !condition.startsWith( " and " ) ) conditions.append( " and " ); + conditions.append( condition ); + } + return this; + } + + public JoinSequence addCondition(String alias, String[] columns, String condition) { + for ( int i = 0; i < columns.length; i++ ) { + conditions.append( " and " ) + .append( alias ) + .append( '.' 
) + .append( columns[i] ) + .append( condition ); + } + return this; + } + + public JoinSequence setRoot(Joinable joinable, String alias) { + this.rootAlias = alias; + this.rootJoinable = joinable; + return this; + } + + public JoinSequence setNext(JoinSequence next) { + this.next = next; + return this; + } + + public JoinSequence setSelector(Selector s) { + this.selector = s; + return this; + } + + public JoinSequence setUseThetaStyle(boolean useThetaStyle) { + this.useThetaStyle = useThetaStyle; + return this; + } + + public boolean isThetaStyle() { + return useThetaStyle; + } + + public int getJoinCount() { + return joins.size(); + } + + public static interface Selector { + public boolean includeSubclasses(String alias); + } +} diff --git a/src/org/hibernate/engine/Mapping.java b/src/org/hibernate/engine/Mapping.java new file mode 100644 index 0000000000..542a055af0 --- /dev/null +++ b/src/org/hibernate/engine/Mapping.java @@ -0,0 +1,28 @@ +//$Id$ +package org.hibernate.engine; + +import org.hibernate.MappingException; +import org.hibernate.type.Type; + +/** + * Defines operations common to "compiled" mappings (ie. SessionFactory) + * and "uncompiled" mappings (ie. Configuration) that are used by + * implementors of Type. + * + * @see org.hibernate.type.Type + * @see org.hibernate.impl.SessionFactoryImpl + * @see org.hibernate.cfg.Configuration + * @author Gavin King + */ +public interface Mapping { + public Type getIdentifierType(String className) throws MappingException; + public String getIdentifierPropertyName(String className) throws MappingException; + public Type getReferencedPropertyType(String className, String propertyName) throws MappingException; +} + + + + + + + diff --git a/src/org/hibernate/engine/NamedQueryDefinition.java b/src/org/hibernate/engine/NamedQueryDefinition.java new file mode 100755 index 0000000000..c03baff7b4 --- /dev/null +++ b/src/org/hibernate/engine/NamedQueryDefinition.java @@ -0,0 +1,123 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.util.Map; + +import org.hibernate.CacheMode; +import org.hibernate.FlushMode; + + +/** + * Definition of a named query, defined in the mapping metadata. 
+ * + * @author Gavin King + */ +public class NamedQueryDefinition implements Serializable { + private final String query; + private final boolean cacheable; + private final String cacheRegion; + private final Integer timeout; + private final Integer fetchSize; + private final FlushMode flushMode; + private final Map parameterTypes; + private CacheMode cacheMode; + private boolean readOnly; + private String comment; + + // kept for backward compatibility until after the 3.1beta5 release of HA + public NamedQueryDefinition( + String query, + boolean cacheable, + String cacheRegion, + Integer timeout, + Integer fetchSize, + FlushMode flushMode, + Map parameterTypes + ) { + this( + query, + cacheable, + cacheRegion, + timeout, + fetchSize, + flushMode, + null, + false, + null, + parameterTypes + ); + } + + public NamedQueryDefinition( + String query, + boolean cacheable, + String cacheRegion, + Integer timeout, + Integer fetchSize, + FlushMode flushMode, + CacheMode cacheMode, + boolean readOnly, + String comment, + Map parameterTypes + ) { + this.query = query; + this.cacheable = cacheable; + this.cacheRegion = cacheRegion; + this.timeout = timeout; + this.fetchSize = fetchSize; + this.flushMode = flushMode; + this.parameterTypes = parameterTypes; + this.cacheMode = cacheMode; + this.readOnly = readOnly; + this.comment = comment; + } + + public String getQueryString() { + return query; + } + + public boolean isCacheable() { + return cacheable; + } + + public String getCacheRegion() { + return cacheRegion; + } + + public Integer getFetchSize() { + return fetchSize; + } + + public Integer getTimeout() { + return timeout; + } + + public FlushMode getFlushMode() { + return flushMode; + } + + public String toString() { + return getClass().getName() + '(' + query + ')'; + } + + public Map getParameterTypes() { + return parameterTypes; + } + + public String getQuery() { + return query; + } + + public CacheMode getCacheMode() { + return cacheMode; + } + + public boolean isReadOnly() { + return readOnly; + } + + public String getComment() { + return comment; + } +} diff --git a/src/org/hibernate/engine/NamedSQLQueryDefinition.java b/src/org/hibernate/engine/NamedSQLQueryDefinition.java new file mode 100755 index 0000000000..6a0e1cc451 --- /dev/null +++ b/src/org/hibernate/engine/NamedSQLQueryDefinition.java @@ -0,0 +1,184 @@ +//$Id$ +package org.hibernate.engine; + +import java.util.List; +import java.util.Map; + +import org.hibernate.FlushMode; +import org.hibernate.CacheMode; +import org.hibernate.engine.query.sql.NativeSQLQueryReturn; + +/** + * Definition of a named native SQL query, defined + * in the mapping metadata. 
+ *
+ * @author Max Andersen
+ */
+public class NamedSQLQueryDefinition extends NamedQueryDefinition {
+
+	private NativeSQLQueryReturn[] queryReturns;
+	private final List querySpaces;
+	private final boolean callable;
+	private String resultSetRef;
+
+	/**
+	 * This form is used to construct a NamedSQLQueryDefinition from the binder
+	 * code when the result-set mapping information is explicitly
+	 * provided in the query definition (i.e., no resultset-mapping used)
+	 *
+	 * @param query The sql query string
+	 * @param queryReturns The in-lined query return definitions
+	 * @param querySpaces Any specified query spaces (used for auto-flushing)
+	 * @param cacheable Whether the query results are cacheable
+	 * @param cacheRegion If cacheable, the region into which to store the results
+	 * @param timeout A JDBC-level timeout to be applied
+	 * @param fetchSize A JDBC-level fetch-size to be applied
+	 * @param flushMode The flush mode to use for this query
+	 * @param cacheMode The cache mode to use during execution and subsequent result loading
+	 * @param readOnly Whether returned entities should be marked as read-only in the session
+	 * @param comment Any sql comment to be applied to the query
+	 * @param parameterTypes parameter type map
+	 * @param callable Does the query string represent a callable object (i.e., proc)
+	 */
+	public NamedSQLQueryDefinition(
+			String query,
+			NativeSQLQueryReturn[] queryReturns,
+			List querySpaces,
+			boolean cacheable,
+			String cacheRegion,
+			Integer timeout,
+			Integer fetchSize,
+			FlushMode flushMode,
+			CacheMode cacheMode,
+			boolean readOnly,
+			String comment,
+			Map parameterTypes,
+			boolean callable) {
+		super(
+				query.trim(), /* trim done to work around stupid oracle bug that can't handle whitespaces before a { in a sp */
+				cacheable,
+				cacheRegion,
+				timeout,
+				fetchSize,
+				flushMode,
+				cacheMode,
+				readOnly,
+				comment,
+				parameterTypes
+		);
+		this.queryReturns = queryReturns;
+		this.querySpaces = querySpaces;
+		this.callable = callable;
+	}
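
	// For illustration, a sketch of how the binder might call the form above for
	// a query with an in-line return definition (the query string, return array
	// and synchronized table name here are assumptions):
	//
	//     NamedSQLQueryDefinition def = new NamedSQLQueryDefinition(
	//         "select {p.*} from person p where p.name = :name",
	//         queryReturns,                  // NativeSQLQueryReturn[] built from the metadata
	//         java.util.Collections.singletonList( "person" ), // query spaces for auto-flush
	//         false, null, null, null,       // not cacheable; no region/timeout/fetch size
	//         FlushMode.AUTO, CacheMode.NORMAL,
	//         false, null,                   // not read-only, no comment
	//         java.util.Collections.EMPTY_MAP,
	//         false                          // not a callable/stored procedure
	//     );
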
+
+	/**
+	 * This form is used to construct a NamedSQLQueryDefinition from the binder
+	 * code when a resultset-mapping reference is used.
+	 *
+	 * @param query The sql query string
+	 * @param resultSetRef The resultset-mapping name
+	 * @param querySpaces Any specified query spaces (used for auto-flushing)
+	 * @param cacheable Whether the query results are cacheable
+	 * @param cacheRegion If cacheable, the region into which to store the results
+	 * @param timeout A JDBC-level timeout to be applied
+	 * @param fetchSize A JDBC-level fetch-size to be applied
+	 * @param flushMode The flush mode to use for this query
+	 * @param cacheMode The cache mode to use during execution and subsequent result loading
+	 * @param readOnly Whether returned entities should be marked as read-only in the session
+	 * @param comment Any sql comment to be applied to the query
+	 * @param parameterTypes parameter type map
+	 * @param callable Does the query string represent a callable object (i.e., proc)
+	 */
+	public NamedSQLQueryDefinition(
+			String query,
+			String resultSetRef,
+			List querySpaces,
+			boolean cacheable,
+			String cacheRegion,
+			Integer timeout,
+			Integer fetchSize,
+			FlushMode flushMode,
+			CacheMode cacheMode,
+			boolean readOnly,
+			String comment,
+			Map parameterTypes,
+			boolean callable) {
+		super(
+				query.trim(), /* trim done to work around stupid oracle bug that can't handle whitespaces before a { in a sp */
+				cacheable,
+				cacheRegion,
+				timeout,
+				fetchSize,
+				flushMode,
+				cacheMode,
+				readOnly,
+				comment,
+				parameterTypes
+		);
+		this.resultSetRef = resultSetRef;
+		this.querySpaces = querySpaces;
+		this.callable = callable;
+	}
+
+	/**
+	 * This form is used from annotations (?). Essentially the same as the above using a
+	 * resultset-mapping reference, but without cacheMode, readOnly, and comment.
+	 *
+	 * FIXME: annotations do not use it, so it can be removed from my POV
+	 * @deprecated
+	 *
+	 * @param query The sql query string
+	 * @param resultSetRef The result-set-mapping name
+	 * @param querySpaces Any specified query spaces (used for auto-flushing)
+	 * @param cacheable Whether the query results are cacheable
+	 * @param cacheRegion If cacheable, the region into which to store the results
+	 * @param timeout A JDBC-level timeout to be applied
+	 * @param fetchSize A JDBC-level fetch-size to be applied
+	 * @param flushMode The flush mode to use for this query
+	 * @param parameterTypes parameter type map
+	 * @param callable Does the query string represent a callable object (i.e., proc)
+	 */
+	public NamedSQLQueryDefinition(
+			String query,
+			String resultSetRef,
+			List querySpaces,
+			boolean cacheable,
+			String cacheRegion,
+			Integer timeout,
+			Integer fetchSize,
+			FlushMode flushMode,
+			Map parameterTypes,
+			boolean callable) {
+		this(
+				query,
+				resultSetRef,
+				querySpaces,
+				cacheable,
+				cacheRegion,
+				timeout,
+				fetchSize,
+				flushMode,
+				null,
+				false,
+				null,
+				parameterTypes,
+				callable
+		);
+	}
+
+	public NativeSQLQueryReturn[] getQueryReturns() {
+		return queryReturns;
+	}
+
+	public List getQuerySpaces() {
+		return querySpaces;
+	}
+
+	public boolean isCallable() {
+		return callable;
+	}
+
+	public String getResultSetRef() {
+		return resultSetRef;
+	}
+}
\ No newline at end of file
diff --git a/src/org/hibernate/engine/Nullability.java b/src/org/hibernate/engine/Nullability.java
new file mode 100755
index 0000000000..089c899b3c
--- /dev/null
+++ b/src/org/hibernate/engine/Nullability.java
@@ -0,0 +1,183 @@
+//$Id$
+package org.hibernate.engine;
+
+import java.util.Iterator;
+
+import org.hibernate.HibernateException;
+import org.hibernate.PropertyValueException;
+import org.hibernate.intercept.LazyPropertyInitializer;
+import org.hibernate.persister.entity.EntityPersister;
+import org.hibernate.type.AbstractComponentType;
+import org.hibernate.type.CollectionType;
+import org.hibernate.type.Type;
+
+/**
+ * Implements the algorithm for validating property values
+ * for illegal null values.
+ * @author Gavin King
+ */
+public final class Nullability {
+
+	private final SessionImplementor session;
+
+	public Nullability(SessionImplementor session) {
+		this.session = session;
+	}
+	/**
+	 * Check nullability of the class persister properties
+	 *
+	 * @param values entity properties
+	 * @param persister class persister
+	 * @param isUpdate whether it is intended to be updated or saved
+	 * @throws org.hibernate.PropertyValueException if a property breaks nullability
+	 * @throws HibernateException error while getting component values
+	 */
+	public void checkNullability(
+			final Object[] values,
+			final EntityPersister persister,
+			final boolean isUpdate)
+	throws PropertyValueException, HibernateException {
+
+		/*
+		 * Algorithm:
+		 * Check for any level-one nullability breaks.
+		 * Look at non-null components to recursively check the next level
+		 * of nullability breaks.
+		 * Look at collections containing components to recursively check
+		 * the next level of nullability breaks.
+		 *
+		 * In the previous implementation, not-null constraints were checked
+		 * by filtering on level-one updateable or insertable columns only.
+		 * So marking a sub-component as update="false" had no effect on the
+		 * not-null check as long as the main component was checkable.
+		 * We keep this behaviour in this implementation; however, it is not
+		 * mentioned in any documentation and is surely a limitation.
+		 */
+
+		final boolean[] nullability = persister.getPropertyNullability();
+		final boolean[] checkability = isUpdate ?
+			persister.getPropertyUpdateability() :
+			persister.getPropertyInsertability();
+		final Type[] propertyTypes = persister.getPropertyTypes();
+
+		for ( int i = 0; i < values.length; i++ ) {
+
+			if ( checkability[i] && values[i]!=LazyPropertyInitializer.UNFETCHED_PROPERTY ) {
+				final Object value = values[i];
+				if ( !nullability[i] && value == null ) {
+
+					//check basic level-one nullability
+					throw new PropertyValueException(
+							"not-null property references a null or transient value",
+							persister.getEntityName(),
+							persister.getPropertyNames()[i]
+						);
+
+				}
+				else if ( value != null ) {
+
+					//the value is not null and is checkable; look deeper
+					String breakProperties = checkSubElementsNullability( propertyTypes[i], value );
+					if ( breakProperties != null ) {
+						throw new PropertyValueException(
+							"not-null property references a null or transient value",
+							persister.getEntityName(),
+							buildPropertyPath( persister.getPropertyNames()[i], breakProperties )
+						);
+					}
+
+				}
+			}
+
+		}
+	}
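
	// For illustration (entity and mapping are assumed): given a Person whose
	// "address" component property is mapped not-null, flushing a Person with a
	// null address makes the loop above throw a PropertyValueException naming
	// the property "address". If address is non-null but its own not-null
	// sub-property "city" is null, the recursive check below reports the
	// composed path "address.city" via buildPropertyPath(). A caller-side
	// sketch, assuming the usual persister/session collaborators:
	//
	//     Object[] values = persister.getPropertyValues( person, session.getEntityMode() );
	//     new Nullability( session ).checkNullability( values, persister, false ); // insert-time check
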
+
+	/**
+	 * Check sub-element nullability. Returns the property path that breaks
+	 * nullability, or null if none
+	 *
+	 * @param propertyType type to check
+	 * @param value value to check
+	 *
+	 * @return property path
+	 * @throws HibernateException error while getting subcomponent values
+	 */
+	private String checkSubElementsNullability(final Type propertyType, final Object value)
+	throws HibernateException {
+		//for non null args, check for components and elements containing components
+		if ( propertyType.isComponentType() ) {
+			return checkComponentNullability( value, (AbstractComponentType) propertyType );
+		}
+		else if ( propertyType.isCollectionType() ) {
+
+			//persistent collections may have components
+			CollectionType collectionType = (CollectionType) propertyType;
+			Type collectionElementType = collectionType.getElementType( session.getFactory() );
+			if ( collectionElementType.isComponentType() ) {
+				//check all component values in the collection
+
+				AbstractComponentType componentType = (AbstractComponentType) collectionElementType;
+				Iterator iter = CascadingAction.getLoadedElementsIterator(session, collectionType, value);
+				while ( iter.hasNext() ) {
+					Object compValue = iter.next();
+					if (compValue != null) {
+						return checkComponentNullability(compValue, componentType);
+					}
+				}
+			}
+		}
+		return null;
+	}
+
+	/**
+	 * Check component nullability. Returns the property path that breaks
+	 * nullability, or null if none
+	 *
+	 * @param value component properties
+	 * @param compType component not-nullable type
+	 *
+	 * @return property path
+	 * @throws HibernateException error while getting subcomponent values
+	 */
+	private String checkComponentNullability(final Object value, final AbstractComponentType compType)
+	throws HibernateException {
+		/* will check current level if some of them are not null
+		 * or sublevels if they exist
+		 */
+		boolean[] nullability = compType.getPropertyNullability();
+		if ( nullability!=null ) {
+			//do the test
+			final Object[] values = compType.getPropertyValues( value, session.getEntityMode() );
+			final Type[] propertyTypes = compType.getSubtypes();
+			for ( int i=0; i<values.length; i++ ) {
+				final Object subvalue = values[i];
+				if ( !nullability[i] && subvalue==null ) {
+					return compType.getPropertyNames()[i];
+				}
+				else if ( subvalue != null ) {
+					String breakProperties = checkSubElementsNullability( propertyTypes[i], subvalue );
+					if ( breakProperties != null ) {
+						return buildPropertyPath( compType.getPropertyNames()[i], breakProperties );
+					}
+				}
+			}
+		}
+		return null;
+	}
+
+	private static String buildPropertyPath(String parent, String child) {
+		return parent + '.' + child;
+	}
+}
diff --git a/src/org/hibernate/engine/ParameterBinder.java b/src/org/hibernate/engine/ParameterBinder.java
new file mode 100644
--- /dev/null
+++ b/src/org/hibernate/engine/ParameterBinder.java
+//$Id$
+package org.hibernate.engine;
+
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.hibernate.HibernateException;
+import org.hibernate.type.Type;
+
+/**
+ * Centralizes the commonality regarding binding of parameter values into
+ * PreparedStatements as this logic is used in many places.
+ * <p/>
+ * Ideally would like to move to the parameter handling as it is done in
+ * the hql.ast package.
+ * + * @author Steve Ebersole + */ +public class ParameterBinder { + + private static final Log log = LogFactory.getLog( ParameterBinder.class ); + + public static interface NamedParameterSource { + public int[] getNamedParameterLocations(String name); + } + + private ParameterBinder() { + } + + public static int bindQueryParameters( + final PreparedStatement st, + final QueryParameters queryParameters, + final int start, + final NamedParameterSource source, + SessionImplementor session) throws SQLException, HibernateException { + int col = start; + col += bindPositionalParameters( st, queryParameters, col, session ); + col += bindNamedParameters( st, queryParameters, col, source, session ); + return col; + } + + public static int bindPositionalParameters( + final PreparedStatement st, + final QueryParameters queryParameters, + final int start, + final SessionImplementor session) throws SQLException, HibernateException { + return bindPositionalParameters( + st, + queryParameters.getPositionalParameterValues(), + queryParameters.getPositionalParameterTypes(), + start, + session + ); + } + + public static int bindPositionalParameters( + final PreparedStatement st, + final Object[] values, + final Type[] types, + final int start, + final SessionImplementor session) throws SQLException, HibernateException { + int span = 0; + for ( int i = 0; i < values.length; i++ ) { + types[i].nullSafeSet( st, values[i], start + span, session ); + span += types[i].getColumnSpan( session.getFactory() ); + } + return span; + } + + public static int bindNamedParameters( + final PreparedStatement ps, + final QueryParameters queryParameters, + final int start, + final NamedParameterSource source, + final SessionImplementor session) throws SQLException, HibernateException { + return bindNamedParameters( ps, queryParameters.getNamedParameters(), start, source, session ); + } + + public static int bindNamedParameters( + final PreparedStatement ps, + final Map namedParams, + final int start, + final NamedParameterSource source, + final SessionImplementor session) throws SQLException, HibernateException { + if ( namedParams != null ) { + // assumes that types are all of span 1 + Iterator iter = namedParams.entrySet().iterator(); + int result = 0; + while ( iter.hasNext() ) { + Map.Entry e = ( Map.Entry ) iter.next(); + String name = ( String ) e.getKey(); + TypedValue typedval = ( TypedValue ) e.getValue(); + int[] locations = source.getNamedParameterLocations( name ); + for ( int i = 0; i < locations.length; i++ ) { + if ( log.isDebugEnabled() ) { + log.debug( "bindNamedParameters() " + + typedval.getValue() + " -> " + name + + " [" + ( locations[i] + start ) + "]" ); + } + typedval.getType().nullSafeSet( ps, typedval.getValue(), locations[i] + start, session ); + } + result += locations.length; + } + return result; + } + else { + return 0; + } + } +} diff --git a/src/org/hibernate/engine/PersistenceContext.java b/src/org/hibernate/engine/PersistenceContext.java new file mode 100644 index 0000000000..6c6b31b891 --- /dev/null +++ b/src/org/hibernate/engine/PersistenceContext.java @@ -0,0 +1,445 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.util.HashSet; +import java.util.Map; + +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.engine.loading.LoadContexts; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import 
org.hibernate.persister.entity.EntityPersister;
+
+/**
+ * Holds the state of the persistence context, including the
+ * first-level cache, entries, snapshots, proxies, etc.
+ *
+ * @author Gavin King
+ */
+public interface PersistenceContext {
+
+	public boolean isStateless();
+
+	/**
+	 * Get the session to which this persistence context is bound.
+	 *
+	 * @return The session.
+	 */
+	public SessionImplementor getSession();
+
+	/**
+	 * Retrieve this persistence context's managed load context.
+	 *
+	 * @return The load context
+	 */
+	public LoadContexts getLoadContexts();
+
+	/**
+	 * Add a collection which has no owner loaded
+	 */
+	public void addUnownedCollection(CollectionKey key, PersistentCollection collection);
+
+	/**
+	 * Get and remove a collection whose owner is not yet loaded,
+	 * when its owner is being loaded
+	 */
+	public PersistentCollection useUnownedCollection(CollectionKey key);
+
+	/**
+	 * Get the BatchFetchQueue, instantiating one if
+	 * necessary.
+	 */
+	public BatchFetchQueue getBatchFetchQueue();
+
+	/**
+	 * Clear the state of the persistence context
+	 */
+	public void clear();
+
+	/**
+	 * @return false if we know for certain that all the entities are read-only
+	 */
+	public boolean hasNonReadOnlyEntities();
+
+	/**
+	 * Set the status of an entry
+	 */
+	public void setEntryStatus(EntityEntry entry, Status status);
+
+	/**
+	 * Called after transactions end
+	 */
+	public void afterTransactionCompletion();
+
+	/**
+	 * Get the current state of the entity as known to the underlying
+	 * database, or null if there is no corresponding row
+	 */
+	public Object[] getDatabaseSnapshot(Serializable id, EntityPersister persister)
+	throws HibernateException;
+
+	public Object[] getCachedDatabaseSnapshot(EntityKey key);
+
+	/**
+	 * Get the values of the natural id fields as known to the underlying
+	 * database, or null if the entity has no natural id or there is no
+	 * corresponding row.
+	 */
+	public Object[] getNaturalIdSnapshot(Serializable id, EntityPersister persister)
+	throws HibernateException;
+
+	/**
+	 * Add a canonical mapping from entity key to entity instance
+	 */
+	public void addEntity(EntityKey key, Object entity);
+
+	/**
+	 * Get the entity instance associated with the given
+	 * EntityKey
+	 */
+	public Object getEntity(EntityKey key);
+
+	/**
+	 * Is there an entity with the given key in the persistence context
+	 */
+	public boolean containsEntity(EntityKey key);
+
+	/**
+	 * Remove an entity from the session cache, also clear
+	 * up other state associated with the entity, all except
+	 * for the EntityEntry
+	 */
+	public Object removeEntity(EntityKey key);
+
+	/**
+	 * Get an entity cached by unique key
+	 */
+	public Object getEntity(EntityUniqueKey euk);
+
+	/**
+	 * Add an entity to the cache by unique key
+	 */
+	public void addEntity(EntityUniqueKey euk, Object entity);
+
+	/**
+	 * Retrieve the EntityEntry representation of the given entity.
+	 *
+	 * @param entity The entity for which to locate the EntityEntry.
+	 * @return The EntityEntry for the given entity.
+	 */
+	public EntityEntry getEntry(Object entity);
+
+	/**
+	 * Remove an entity entry from the session cache
+	 */
+	public EntityEntry removeEntry(Object entity);
+
+	/**
+	 * Is there an EntityEntry for this instance?
+	 */
+	public boolean isEntryFor(Object entity);
+
+	/**
+	 * Get the collection entry for a persistent collection
+	 */
+	public CollectionEntry getCollectionEntry(PersistentCollection coll);
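
	// For illustration, a minimal sketch of the lookup sequence a loader runs
	// against these methods (persister, id and session are assumed; the
	// EntityKey construction shown, id + persister + entity mode, is an
	// assumption about that class's constructor):
	//
	//     PersistenceContext pc = session.getPersistenceContext();
	//     EntityKey key = new EntityKey( id, persister, session.getEntityMode() );
	//     Object entity = pc.getEntity( key );      // first-level cache hit?
	//     if ( entity == null ) {
	//         entity = pc.getProxy( key );          // maybe only a proxy so far
	//     }
	//
	// On a miss the loader reads the row, then registers it via addEntity(key,
	// instance) plus an addEntry(...) carrying the loaded state and LockMode.
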
+
+	/**
+	 * Adds an entity to the internal caches.
+	 */
+	public EntityEntry addEntity(final Object entity, final Status status,
+			final Object[] loadedState, final EntityKey entityKey, final Object version,
+			final LockMode lockMode, final boolean existsInDatabase,
+			final EntityPersister persister, final boolean disableVersionIncrement, boolean lazyPropertiesAreUnfetched);
+
+	/**
+	 * Generates an appropriate EntityEntry instance and adds it
+	 * to the event source's internal caches.
+	 */
+	public EntityEntry addEntry(final Object entity, final Status status,
+			final Object[] loadedState, final Object rowId, final Serializable id,
+			final Object version, final LockMode lockMode, final boolean existsInDatabase,
+			final EntityPersister persister, final boolean disableVersionIncrement, boolean lazyPropertiesAreUnfetched);
+
+	/**
+	 * Is the given collection associated with this persistence context?
+	 */
+	public boolean containsCollection(PersistentCollection collection);
+
+	/**
+	 * Is the given proxy associated with this persistence context?
+	 */
+	public boolean containsProxy(Object proxy);
+
+	/**
+	 * Takes the given object and, if it represents a proxy, reassociates it with this event source.
+	 *
+	 * @param value The possible proxy to be reassociated.
+	 * @return Whether the passed value represented an actual proxy which got initialized.
+	 * @throws MappingException
+	 */
+	public boolean reassociateIfUninitializedProxy(Object value) throws MappingException;
+
+	/**
+	 * If a deleted entity instance is re-saved, and it has a proxy, we need to
+	 * reset the identifier of the proxy
+	 */
+	public void reassociateProxy(Object value, Serializable id) throws MappingException;
+
+	/**
+	 * Get the entity instance underlying the given proxy, throwing
+	 * an exception if the proxy is uninitialized. If the given object
+	 * is not a proxy, simply return the argument.
+	 */
+	public Object unproxy(Object maybeProxy) throws HibernateException;
+
+	/**
+	 * Possibly unproxy the given reference and reassociate it with the current session.
+	 *
+	 * @param maybeProxy The reference to be unproxied if it currently represents a proxy.
+	 * @return The unproxied instance.
+	 * @throws HibernateException
+	 */
+	public Object unproxyAndReassociate(Object maybeProxy) throws HibernateException;
+
+	/**
+	 * Attempts to check whether the given key represents an entity already loaded within the
+	 * current session.
+	 * @param object The entity reference against which to perform the uniqueness check.
+	 * @throws HibernateException
+	 */
+	public void checkUniqueness(EntityKey key, Object object) throws HibernateException;
+
+	/**
+	 * If the existing proxy is insufficiently "narrow" (derived), instantiate a new proxy
+	 * and overwrite the registration of the old one. This breaks == and occurs only for
+	 * "class" proxies rather than "interface" proxies. Also initializes the proxy to point to
+	 * the given target implementation, if necessary.
+	 *
+	 * @param proxy The proxy instance to be narrowed.
+	 * @param persister The persister for the proxied entity.
+	 * @param key The internal cache key for the proxied entity.
+	 * @param object (optional) the actual proxied entity instance.
+	 * @return An appropriately narrowed instance.
+	 * @throws HibernateException
+	 */
+	public Object narrowProxy(Object proxy, EntityPersister persister, EntityKey key, Object object)
+	throws HibernateException;
+
+	/**
+	 * Return the existing proxy associated with the given EntityKey, or the
+	 * third argument (the entity associated with the key) if no proxy exists.
Init + * the proxy to the target implementation, if necessary. + */ + public Object proxyFor(EntityPersister persister, EntityKey key, Object impl) + throws HibernateException; + + /** + * Return the existing proxy associated with the given EntityKey, or the + * argument (the entity associated with the key) if no proxy exists. + * (slower than the form above) + */ + public Object proxyFor(Object impl) throws HibernateException; + + /** + * Get the entity that owns this persistent collection + */ + public Object getCollectionOwner(Serializable key, CollectionPersister collectionPersister) + throws MappingException; + + /** + * add a collection we just loaded up (still needs initializing) + */ + public void addUninitializedCollection(CollectionPersister persister, + PersistentCollection collection, Serializable id); + + /** + * add a detached uninitialized collection + */ + public void addUninitializedDetachedCollection(CollectionPersister persister, + PersistentCollection collection); + + /** + * Add a new collection (ie. a newly created one, just instantiated by the + * application, with no database state or snapshot) + * @param collection The collection to be associated with the persistence context + */ + public void addNewCollection(CollectionPersister persister, PersistentCollection collection) + throws HibernateException; + + /** + * add an (initialized) collection that was created by another session and passed + * into update() (ie. one with a snapshot and existing state on the database) + */ + public void addInitializedDetachedCollection(CollectionPersister collectionPersister, + PersistentCollection collection) throws HibernateException; + + /** + * add a collection we just pulled out of the cache (does not need initializing) + */ + public CollectionEntry addInitializedCollection(CollectionPersister persister, + PersistentCollection collection, Serializable id) throws HibernateException; + + /** + * Get the collection instance associated with the CollectionKey + */ + public PersistentCollection getCollection(CollectionKey collectionKey); + + /** + * Register a collection for non-lazy loading at the end of the + * two-phase load + */ + public void addNonLazyCollection(PersistentCollection collection); + + /** + * Force initialization of all non-lazy collections encountered during + * the current two-phase load (actually, this is a no-op, unless this + * is the "outermost" load) + */ + public void initializeNonLazyCollections() throws HibernateException; + + /** + * Get the PersistentCollection object for an array + */ + public PersistentCollection getCollectionHolder(Object array); + + /** + * Register a PersistentCollection object for an array. + * Associates a holder with an array - MUST be called after loading + * array, since the array instance is not created until endLoad(). + */ + public void addCollectionHolder(PersistentCollection holder); + + /** + * Remove the mapping of collection to holder during eviction + * of the owning entity + */ + public PersistentCollection removeCollectionHolder(Object array); + + /** + * Get the snapshot of the pre-flush collection state + */ + public Serializable getSnapshot(PersistentCollection coll); + + /** + * Get the collection entry for a collection passed to filter, + * which might be a collection wrapper, an array, or an unwrapped + * collection. Return null if there is no entry. 
+	 */
+	public CollectionEntry getCollectionEntryOrNull(Object collection);
+
+	/**
+	 * Get an existing proxy by key
+	 */
+	public Object getProxy(EntityKey key);
+
+	/**
+	 * Add a proxy to the session cache
+	 */
+	public void addProxy(EntityKey key, Object proxy);
+
+	/**
+	 * Remove a proxy from the session cache
+	 */
+	public Object removeProxy(EntityKey key);
+
+	/**
+	 * Retrieve the set of EntityKeys representing nullifiable references
+	 */
+	public HashSet getNullifiableEntityKeys();
+
+	/**
+	 * Get the mapping from key value to entity instance
+	 */
+	public Map getEntitiesByKey();
+
+	/**
+	 * Get the mapping from entity instance to entity entry
+	 */
+	public Map getEntityEntries();
+
+	/**
+	 * Get the mapping from collection instance to collection entry
+	 */
+	public Map getCollectionEntries();
+
+	/**
+	 * Get the mapping from collection key to collection instance
+	 */
+	public Map getCollectionsByKey();
+
+	/**
+	 * How deep are we cascaded?
+	 */
+	public int getCascadeLevel();
+
+	/**
+	 * Called before cascading
+	 */
+	public int incrementCascadeLevel();
+
+	/**
+	 * Called after cascading
+	 */
+	public int decrementCascadeLevel();
+
+	/**
+	 * Is a flush cycle currently in process?
+	 */
+	public boolean isFlushing();
+
+	/**
+	 * Called before and after the flush cycle
+	 */
+	public void setFlushing(boolean flushing);
+
+	/**
+	 * Call this before beginning a two-phase load
+	 */
+	public void beforeLoad();
+
+	/**
+	 * Call this after finishing a two-phase load
+	 */
+	public void afterLoad();
+
+	/**
+	 * Returns a string representation of the object.
+	 *
+	 * @return a string representation of the object.
+	 */
+	public String toString();
+
+	/**
+	 * Search the persistence context for an owner for the child object,
+	 * given a collection role
+	 */
+	public Serializable getOwnerId(String entity, String property, Object childObject, Map mergeMap);
+
+	/**
+	 * Search the persistence context for an index of the child object,
+	 * given a collection role
+	 */
+	public Object getIndexInOwner(String entity, String property, Object childObject, Map mergeMap);
+
+	/**
+	 * Record the fact that the association belonging to the keyed
+	 * entity is null.
+	 */
+	public void addNullProperty(EntityKey ownerKey, String propertyName);
+
+	/**
+	 * Is the association property belonging to the keyed entity null?
+	 */
+	public boolean isPropertyNull(EntityKey ownerKey, String propertyName);
+
+	/**
+	 * Set the object to read-only and discard its snapshot
+	 */
+	public void setReadOnly(Object entity, boolean readOnly);
+
+	void replaceDelayedEntityIdentityInsertKeys(EntityKey oldKey, Serializable generatedId);
+}
diff --git a/src/org/hibernate/engine/QueryParameters.java b/src/org/hibernate/engine/QueryParameters.java
new file mode 100644
index 0000000000..e4f35411b6
--- /dev/null
+++ b/src/org/hibernate/engine/QueryParameters.java
@@ -0,0 +1,482 @@
+//$Id$
+package org.hibernate.engine;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.StringTokenizer;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.hibernate.HibernateException;
+import org.hibernate.QueryException;
+import org.hibernate.ScrollMode;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.hql.classic.ParserHelper;
+import org.hibernate.pretty.Printer;
+import org.hibernate.transform.ResultTransformer;
+import org.hibernate.type.Type;
+import org.hibernate.util.ArrayHelper;
+
+/**
+ * @author Gavin King
+ */
+public final class QueryParameters {
+	private static final Log log = LogFactory.getLog(QueryParameters.class);
+
+	private Type[] positionalParameterTypes;
+	private Object[] positionalParameterValues;
+	private Map namedParameters;
+	private Map lockModes;
+	private RowSelection rowSelection;
+	private boolean cacheable;
+	private String cacheRegion;
+	private String comment;
+	private ScrollMode scrollMode;
+	private Serializable[] collectionKeys;
+	private Object optionalObject;
+	private String optionalEntityName;
+	private Serializable optionalId;
+	private boolean readOnly;
+	private boolean callable = false;
+	private boolean autodiscovertypes = false;
+	private boolean isNaturalKeyLookup;
+
+	private final ResultTransformer resultTransformer; // why are all the others non-final?
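
	// For illustration, the simplest construction path (Hibernate.STRING is the
	// usual built-in Type constant; the cache settings are optional):
	//
	//     QueryParameters qp = new QueryParameters(
	//         new Type[] { Hibernate.STRING },
	//         new Object[] { "gavin" }
	//     );
	//     qp.setCacheable( true );
	//     qp.setCacheRegion( "query.person" );
	//
	// A loader later reads these back through getPositionalParameterTypes() and
	// getPositionalParameterValues() when binding the JDBC statement.
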
+
+	private String processedSQL;
+	private Type[] processedPositionalParameterTypes;
+	private Object[] processedPositionalParameterValues;
+
+	public QueryParameters() {
+		this( ArrayHelper.EMPTY_TYPE_ARRAY, ArrayHelper.EMPTY_OBJECT_ARRAY );
+	}
+
+	public QueryParameters(Type type, Object value) {
+		this( new Type[] {type}, new Object[] {value} );
+	}
+
+	public QueryParameters(
+			final Type[] positionalParameterTypes,
+			final Object[] positionalParameterValues,
+			final Object optionalObject,
+			final String optionalEntityName,
+			final Serializable optionalObjectId
+	) {
+		this(positionalParameterTypes, positionalParameterValues);
+		this.optionalObject = optionalObject;
+		this.optionalId = optionalObjectId;
+		this.optionalEntityName = optionalEntityName;
+
+	}
+
+	public QueryParameters(
+			final Type[] positionalParameterTypes,
+			final Object[] positionalParameterValues
+	) {
+		this(
+				positionalParameterTypes,
+				positionalParameterValues,
+				null,
+				null,
+				false,
+				null,
+				null,
+				false,
+				null
+		);
+	}
+
+	public QueryParameters(
+			final Type[] positionalParameterTypes,
+			final Object[] positionalParameterValues,
+			final Serializable[] collectionKeys
+	) {
+		this(
+				positionalParameterTypes,
+				positionalParameterValues,
+				null,
+				collectionKeys
+		);
+	}
+
+	public QueryParameters(
+			final Type[] positionalParameterTypes,
+			final Object[] positionalParameterValues,
+			final Map namedParameters,
+			final Serializable[] collectionKeys
+	) {
+		this(
+				positionalParameterTypes,
+				positionalParameterValues,
+				namedParameters,
+				null,
+				null,
+				false,
+				false,
+				null,
+				null,
+				collectionKeys,
+				null
+		);
+	}
+
+	public QueryParameters(
+			final Type[] positionalParameterTypes,
+			final Object[] positionalParameterValues,
+			final Map lockModes,
+			final RowSelection rowSelection,
+			final boolean cacheable,
+			final String cacheRegion,
+			//final boolean forceCacheRefresh,
+			final String comment,
+			final boolean isLookupByNaturalKey,
+			final ResultTransformer transformer
+	) {
+		this(
+				positionalParameterTypes,
+				positionalParameterValues,
+				null,
+				lockModes,
+				rowSelection,
+				false,
+				cacheable,
+				cacheRegion,
+				comment,
+				null,
+				transformer
+		);
+		isNaturalKeyLookup = isLookupByNaturalKey;
+	}
+
+	public QueryParameters(
+			final Type[] positionalParameterTypes,
+			final Object[] positionalParameterValues,
+			final Map namedParameters,
+			final Map lockModes,
+			final RowSelection rowSelection,
+			final boolean readOnly,
+			final boolean cacheable,
+			final String cacheRegion,
+			//final boolean forceCacheRefresh,
+			final String comment,
+			final Serializable[] collectionKeys,
+			ResultTransformer transformer
+	) {
+		this.positionalParameterTypes = positionalParameterTypes;
+		this.positionalParameterValues = positionalParameterValues;
+		this.namedParameters = namedParameters;
+		this.lockModes = lockModes;
+		this.rowSelection = rowSelection;
+		this.cacheable = cacheable;
+		this.cacheRegion = cacheRegion;
+		//this.forceCacheRefresh = forceCacheRefresh;
+		this.comment = comment;
+		this.collectionKeys = collectionKeys;
+		this.readOnly = readOnly;
+		this.resultTransformer = transformer;
+	}
+
+	public QueryParameters(
+			final Type[] positionalParameterTypes,
+			final Object[] positionalParameterValues,
+			final Map namedParameters,
+			final Map lockModes,
+			final RowSelection rowSelection,
+			final boolean readOnly,
+			final boolean cacheable,
+			final String cacheRegion,
+			//final boolean forceCacheRefresh,
+			final String comment,
+			final Serializable[] collectionKeys,
+			final Object optionalObject,
+			final String
optionalEntityName, + final Serializable optionalId, + final ResultTransformer transformer + ) { + this( + positionalParameterTypes, + positionalParameterValues, + namedParameters, + lockModes, + rowSelection, + readOnly, + cacheable, + cacheRegion, + comment, + collectionKeys, + transformer + ); + this.optionalEntityName = optionalEntityName; + this.optionalId = optionalId; + this.optionalObject = optionalObject; + } + + public boolean hasRowSelection() { + return rowSelection!=null; + } + + public Map getNamedParameters() { + return namedParameters; + } + + public Type[] getPositionalParameterTypes() { + return positionalParameterTypes; + } + + public Object[] getPositionalParameterValues() { + return positionalParameterValues; + } + + public RowSelection getRowSelection() { + return rowSelection; + } + + public ResultTransformer getResultTransformer() { + return resultTransformer; + } + + public void setNamedParameters(Map map) { + namedParameters = map; + } + + public void setPositionalParameterTypes(Type[] types) { + positionalParameterTypes = types; + } + + public void setPositionalParameterValues(Object[] objects) { + positionalParameterValues = objects; + } + + public void setRowSelection(RowSelection selection) { + rowSelection = selection; + } + + public Map getLockModes() { + return lockModes; + } + + public void setLockModes(Map map) { + lockModes = map; + } + + public void traceParameters(SessionFactoryImplementor factory) throws HibernateException { + Printer print = new Printer(factory); + if (positionalParameterValues.length!=0) { + log.trace( + "parameters: " + + print.toString(positionalParameterTypes, positionalParameterValues) + ); + } + if (namedParameters!=null) { + log.trace( "named parameters: " + print.toString(namedParameters) ); + } + } + + public boolean isCacheable() { + return cacheable; + } + + public void setCacheable(boolean b) { + cacheable = b; + } + + public String getCacheRegion() { + return cacheRegion; + } + + public void setCacheRegion(String cacheRegion) { + this.cacheRegion = cacheRegion; + } + + public void validateParameters() throws QueryException { + int types = positionalParameterTypes==null ? 0 : positionalParameterTypes.length; + int values = positionalParameterValues==null ? 
0 : positionalParameterValues.length; + if (types!=values) { + throw new QueryException( + "Number of positional parameter types:" + types + + " does not match number of positional parameters: " + values + ); + } + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public ScrollMode getScrollMode() { + return scrollMode; + } + + public void setScrollMode(ScrollMode scrollMode) { + this.scrollMode = scrollMode; + } + + public Serializable[] getCollectionKeys() { + return collectionKeys; + } + + public void setCollectionKeys(Serializable[] collectionKeys) { + this.collectionKeys = collectionKeys; + } + + public String getOptionalEntityName() { + return optionalEntityName; + } + + public void setOptionalEntityName(String optionalEntityName) { + this.optionalEntityName = optionalEntityName; + } + + public Serializable getOptionalId() { + return optionalId; + } + + public void setOptionalId(Serializable optionalId) { + this.optionalId = optionalId; + } + + public Object getOptionalObject() { + return optionalObject; + } + + public void setOptionalObject(Object optionalObject) { + this.optionalObject = optionalObject; + } + + public boolean isReadOnly() { + return readOnly; + } + + public void setReadOnly(boolean readOnly) { + this.readOnly = readOnly; + } + + public void setCallable(boolean callable) { + this.callable = callable; + } + + public boolean isCallable() { + return callable; + } + + public boolean hasAutoDiscoverScalarTypes() { + return autodiscovertypes; + } + + public void processFilters(String sql, SessionImplementor session) { + + if ( session.getEnabledFilters().size()==0 || sql.indexOf(ParserHelper.HQL_VARIABLE_PREFIX)<0 ) { + // HELLA IMPORTANT OPTIMIZATION!!! + processedPositionalParameterValues = getPositionalParameterValues(); + processedPositionalParameterTypes = getPositionalParameterTypes(); + processedSQL = sql; + } + else { + + Dialect dialect = session.getFactory().getDialect(); + String symbols = new StringBuffer().append( ParserHelper.HQL_SEPARATORS ) + .append( dialect.openQuote() ) + .append( dialect.closeQuote() ) + .toString(); + StringTokenizer tokens = new StringTokenizer( sql, symbols, true ); + StringBuffer result = new StringBuffer(); + + List parameters = new ArrayList(); + List parameterTypes = new ArrayList(); + + while ( tokens.hasMoreTokens() ) { + final String token = tokens.nextToken(); + if ( token.startsWith( ParserHelper.HQL_VARIABLE_PREFIX ) ) { + String filterParameterName = token.substring( 1 ); + Object value = session.getFilterParameterValue( filterParameterName ); + Type type = session.getFilterParameterType( filterParameterName ); + if ( value != null && Collection.class.isAssignableFrom( value.getClass() ) ) { + Iterator itr = ( ( Collection ) value ).iterator(); + while ( itr.hasNext() ) { + Object elementValue = itr.next(); + result.append( '?' ); + parameters.add( elementValue ); + parameterTypes.add( type ); + if ( itr.hasNext() ) { + result.append( ", " ); + } + } + } + else { + result.append( '?' 
); + parameters.add( value ); + parameterTypes.add( type ); + } + } + else { + result.append( token ); + } + } + parameters.addAll( Arrays.asList( getPositionalParameterValues() ) ); + parameterTypes.addAll( Arrays.asList( getPositionalParameterTypes() ) ); + processedPositionalParameterValues = parameters.toArray(); + processedPositionalParameterTypes = ( Type[] ) parameterTypes.toArray( new Type[0] ); + processedSQL = result.toString(); + + } + } + + public String getFilteredSQL() { + return processedSQL; + } + + public Object[] getFilteredPositionalParameterValues() { + return processedPositionalParameterValues; + } + + public Type[] getFilteredPositionalParameterTypes() { + return processedPositionalParameterTypes; + } + + public boolean isNaturalKeyLookup() { + return isNaturalKeyLookup; + } + + public void setNaturalKeyLookup(boolean isNaturalKeyLookup) { + this.isNaturalKeyLookup = isNaturalKeyLookup; + } + + public void setAutoDiscoverScalarTypes(boolean autodiscovertypes) { + this.autodiscovertypes = autodiscovertypes; + } + + public QueryParameters createCopyUsing(RowSelection selection) { + QueryParameters copy = new QueryParameters( + this.positionalParameterTypes, + this.positionalParameterValues, + this.namedParameters, + this.lockModes, + selection, + this.readOnly, + this.cacheable, + this.cacheRegion, + this.comment, + this.collectionKeys, + this.optionalObject, + this.optionalEntityName, + this.optionalId, + this.resultTransformer + ); + copy.processedSQL = this.processedSQL; + copy.processedPositionalParameterTypes = this.processedPositionalParameterTypes; + copy.processedPositionalParameterValues = this.processedPositionalParameterValues; + return copy; + } + + +} diff --git a/src/org/hibernate/engine/ResultSetMappingDefinition.java b/src/org/hibernate/engine/ResultSetMappingDefinition.java new file mode 100644 index 0000000000..5c2c219083 --- /dev/null +++ b/src/org/hibernate/engine/ResultSetMappingDefinition.java @@ -0,0 +1,46 @@ +//$Id$ +package org.hibernate.engine; + +import java.util.ArrayList; +import java.util.List; +import java.io.Serializable; + +import org.hibernate.engine.query.sql.NativeSQLQueryReturn; + +/** + * Keep a description of the resultset mapping + * + * @author Emmanuel Bernard + */ +public class ResultSetMappingDefinition implements Serializable { + + private final String name; + private final List /*NativeSQLQueryReturn*/ queryReturns = new ArrayList(); + + public ResultSetMappingDefinition(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void addQueryReturn(NativeSQLQueryReturn queryReturn) { + queryReturns.add( queryReturn ); + } + +// We could also keep these if needed for binary compatibility with annotations, provided +// it only uses the addXXX() methods... 
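A minimal, self-contained sketch of the token-walking expansion technique used by QueryParameters.processFilters() above: each ":name" placeholder becomes a JDBC "?", and a Collection-valued filter parameter expands to one marker per element. The class name, the simplified delimiter set, and the plain value map below are illustrative stand-ins, not Hibernate API (the real code tokenizes on ParserHelper.HQL_SEPARATORS plus the dialect's quote characters).

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;

// Illustrative sketch of the processFilters() expansion technique.
public class FilterExpansionSketch {
	public static void main(String[] args) {
		String sql = "select * from Document where region in (:regions) and minSize > :minSize";
		Map values = new HashMap();
		values.put( "regions", Arrays.asList( new String[] { "EMEA", "APAC" } ) );
		values.put( "minSize", new Integer( 10 ) );

		List bindValues = new ArrayList();
		StringBuffer result = new StringBuffer();
		// tokenize, keeping the delimiters so the SQL can be rebuilt as-is
		StringTokenizer tokens = new StringTokenizer( sql, " \t\n\r,()", true );
		while ( tokens.hasMoreTokens() ) {
			String token = tokens.nextToken();
			if ( token.startsWith( ":" ) ) {
				Object value = values.get( token.substring( 1 ) );
				if ( value instanceof Collection ) {
					// expand a collection to a comma-separated run of markers
					Iterator itr = ( ( Collection ) value ).iterator();
					while ( itr.hasNext() ) {
						bindValues.add( itr.next() );
						result.append( '?' );
						if ( itr.hasNext() ) {
							result.append( ", " );
						}
					}
				}
				else {
					result.append( '?' );
					bindValues.add( value );
				}
			}
			else {
				result.append( token );
			}
		}
		System.out.println( result );     // ... where region in (?, ?) and minSize > ?
		System.out.println( bindValues ); // [EMEA, APAC, 10]
	}
}
```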
+// public void addEntityQueryReturn(NativeSQLQueryNonScalarReturn entityQueryReturn) { +// entityQueryReturns.add(entityQueryReturn); +// } +// +// public void addScalarQueryReturn(NativeSQLQueryScalarReturn scalarQueryReturn) { +// scalarQueryReturns.add(scalarQueryReturn); +// } + + public NativeSQLQueryReturn[] getQueryReturns() { + return ( NativeSQLQueryReturn[] ) queryReturns.toArray( new NativeSQLQueryReturn[0] ); + } + +} diff --git a/src/org/hibernate/engine/RowSelection.java b/src/org/hibernate/engine/RowSelection.java new file mode 100644 index 0000000000..1aa776f957 --- /dev/null +++ b/src/org/hibernate/engine/RowSelection.java @@ -0,0 +1,50 @@ +//$Id$ +package org.hibernate.engine; + +/** + * Represents a selection of rows in a JDBC ResultSet + * @author Gavin King + */ +public final class RowSelection { + private Integer firstRow; + private Integer maxRows; + private Integer timeout; + private Integer fetchSize; + + public void setFirstRow(Integer firstRow) { + this.firstRow = firstRow; + } + + public Integer getFirstRow() { + return firstRow; + } + + public void setMaxRows(Integer maxRows) { + this.maxRows = maxRows; + } + + public Integer getMaxRows() { + return maxRows; + } + + public void setTimeout(Integer timeout) { + this.timeout = timeout; + } + + public Integer getTimeout() { + return timeout; + } + + public Integer getFetchSize() { + return fetchSize; + } + + public void setFetchSize(Integer fetchSize) { + this.fetchSize = fetchSize; + } + + public boolean definesLimits() { + return maxRows != null || + ( firstRow != null && firstRow.intValue() <= 0 ); + } +} diff --git a/src/org/hibernate/engine/SessionFactoryImplementor.java b/src/org/hibernate/engine/SessionFactoryImplementor.java new file mode 100644 index 0000000000..644c5a3a78 --- /dev/null +++ b/src/org/hibernate/engine/SessionFactoryImplementor.java @@ -0,0 +1,171 @@ +//$Id$ +package org.hibernate.engine; + +import java.util.Map; +import java.util.Set; +import java.sql.Connection; + +import javax.transaction.TransactionManager; + +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.MappingException; +import org.hibernate.SessionFactory; +import org.hibernate.ConnectionReleaseMode; +import org.hibernate.proxy.EntityNotFoundDelegate; +import org.hibernate.engine.query.QueryPlanCache; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.cache.Cache; +import org.hibernate.cache.QueryCache; +import org.hibernate.cache.UpdateTimestampsCache; +import org.hibernate.cfg.Settings; +import org.hibernate.connection.ConnectionProvider; +import org.hibernate.dialect.Dialect; +import org.hibernate.dialect.function.SQLFunctionRegistry; +import org.hibernate.exception.SQLExceptionConverter; +import org.hibernate.id.IdentifierGenerator; +import org.hibernate.stat.StatisticsImplementor; +import org.hibernate.type.Type; + +/** + * Defines the internal contract between the SessionFactory and other parts of + * Hibernate such as implementors of Type. 
+ * + * @see org.hibernate.SessionFactory + * @see org.hibernate.impl.SessionFactoryImpl + * @author Gavin King + */ +public interface SessionFactoryImplementor extends Mapping, SessionFactory { + + /** + * Get the persister for the named entity + */ + public EntityPersister getEntityPersister(String entityName) throws MappingException; + /** + * Get the persister object for a collection role + */ + public CollectionPersister getCollectionPersister(String role) throws MappingException; + + /** + * Get the SQL Dialect + */ + public Dialect getDialect(); + + public Interceptor getInterceptor(); + + public QueryPlanCache getQueryPlanCache(); + + /** + * Get the return types of a query + */ + public Type[] getReturnTypes(String queryString) throws HibernateException; + + /** + * Get the return aliases of a query + */ + public String[] getReturnAliases(String queryString) throws HibernateException; + + /** + * Get the connection provider + */ + public ConnectionProvider getConnectionProvider(); + /** + * Get the names of all persistent classes that implement/extend the given interface/class + */ + public String[] getImplementors(String className) throws MappingException; + /** + * Get a class name, using query language imports + */ + public String getImportedClassName(String name); + + + /** + * Get the JTA transaction manager + */ + public TransactionManager getTransactionManager(); + + + /** + * Get the default query cache + */ + public QueryCache getQueryCache(); + /** + * Get a particular named query cache, or the default cache + * @param regionName the name of the cache region, or null for the default query cache + * @return the existing cache, or a newly created cache if none by that region name + */ + public QueryCache getQueryCache(String regionName) throws HibernateException; + + /** + * Get the cache of table update timestamps + */ + public UpdateTimestampsCache getUpdateTimestampsCache(); + /** + * Statistics SPI + */ + public StatisticsImplementor getStatisticsImplementor(); + + public NamedQueryDefinition getNamedQuery(String queryName); + public NamedSQLQueryDefinition getNamedSQLQuery(String queryName); + public ResultSetMappingDefinition getResultSetMapping(String name); + + /** + * Get the identifier generator for the hierarchy + */ + public IdentifierGenerator getIdentifierGenerator(String rootEntityName); + + /** + * Get a named second-level cache region + */ + public Cache getSecondLevelCacheRegion(String regionName); + + public Map getAllSecondLevelCacheRegions(); + + /** + * Retrieves the SQLExceptionConverter in effect for this SessionFactory. + * + * @return The SQLExceptionConverter for this SessionFactory. + */ + public SQLExceptionConverter getSQLExceptionConverter(); + + public Settings getSettings(); + + /** + * Get a nontransactional "current" session for Hibernate EntityManager + */ + public org.hibernate.classic.Session openTemporarySession() throws HibernateException; + + /** + * Open a session conforming to the given parameters. Used mainly by + * {@link org.hibernate.context.JTASessionContext} for current session processing. + * + * @param connection The external jdbc connection to use, if one (i.e., optional). + * @param flushBeforeCompletionEnabled Should the session be auto-flushed + * prior to transaction completion? + * @param autoCloseSessionEnabled Should the session be auto-closed after + * transaction completion? + * @param connectionReleaseMode The release mode for managed jdbc connections. + * @return An appropriate session. 
+ * @throws HibernateException + */ + public org.hibernate.classic.Session openSession( + final Connection connection, + final boolean flushBeforeCompletionEnabled, + final boolean autoCloseSessionEnabled, + final ConnectionReleaseMode connectionReleaseMode) throws HibernateException; + + /** + * Retrieves a set of all the collection roles in which the given entity + * is a participant, as either an index or an element. + * + * @param entityName The entity name for which to get the collection roles. + * @return set of all the collection roles in which the given entityName participates. + */ + public Set getCollectionRolesByEntityParticipant(String entityName); + + public EntityNotFoundDelegate getEntityNotFoundDelegate(); + + public SQLFunctionRegistry getSqlFunctionRegistry(); + +} diff --git a/src/org/hibernate/engine/SessionImplementor.java b/src/org/hibernate/engine/SessionImplementor.java new file mode 100644 index 0000000000..7a5a78c38e --- /dev/null +++ b/src/org/hibernate/engine/SessionImplementor.java @@ -0,0 +1,302 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.sql.Connection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.hibernate.CacheMode; +import org.hibernate.EntityMode; +import org.hibernate.FlushMode; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.Query; +import org.hibernate.ScrollMode; +import org.hibernate.ScrollableResults; +import org.hibernate.Transaction; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.event.EventListeners; +import org.hibernate.impl.CriteriaImpl; +import org.hibernate.jdbc.Batcher; +import org.hibernate.jdbc.JDBCContext; +import org.hibernate.loader.custom.CustomQuery; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.Type; + + +/** + * Defines the internal contract between the Session and other parts of + * Hibernate such as implementors of Type or EntityPersister. + * + * @see org.hibernate.Session the interface to the application + * @see org.hibernate.impl.SessionImpl the actual implementation + * @author Gavin King + */ +public interface SessionImplementor extends Serializable { + + /** + * Retrieves the interceptor currently in use by this event source. + * + * @return The interceptor. + */ + public Interceptor getInterceptor(); + + /** + * Enable/disable automatic cache clearing from after transaction + * completion (for EJB3) + */ + public void setAutoClear(boolean enabled); + + /** + * Does this Session have an active Hibernate transaction + * or is there a JTA transaction in progress? + */ + public boolean isTransactionInProgress(); + + /** + * Initialize the collection (if not already initialized) + */ + public void initializeCollection(PersistentCollection collection, boolean writing) + throws HibernateException; + + /** + * Load an instance without checking if it was deleted. + * + * When nullable is disabled this method may create a new proxy or + * return an existing proxy; if it does not exist, throw an exception. + * + * When nullable is enabled, the method does not create new proxies + * (but might return an existing proxy); if it does not exist, return + * null. 
+ * + * When eager is enabled, the object is eagerly fetched + */ + public Object internalLoad(String entityName, Serializable id, boolean eager, boolean nullable) + throws HibernateException; + + /** + * Load an instance immediately. This method is only called when lazily initializing a proxy. + * Do not return the proxy. + */ + public Object immediateLoad(String entityName, Serializable id) throws HibernateException; + + /** + * System time before the start of the transaction + */ + public long getTimestamp(); + /** + * Get the creating SessionFactoryImplementor + */ + public SessionFactoryImplementor getFactory(); + /** + * Get the prepared statement Batcher for this session + */ + public Batcher getBatcher(); + + /** + * Execute a find() query + */ + public List list(String query, QueryParameters queryParameters) throws HibernateException; + /** + * Execute an iterate() query + */ + public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException; + /** + * Execute a scroll() query + */ + public ScrollableResults scroll(String query, QueryParameters queryParameters) throws HibernateException; + /** + * Execute a criteria query + */ + public ScrollableResults scroll(CriteriaImpl criteria, ScrollMode scrollMode); + /** + * Execute a criteria query + */ + public List list(CriteriaImpl criteria); + + /** + * Execute a filter + */ + public List listFilter(Object collection, String filter, QueryParameters queryParameters) throws HibernateException; + /** + * Iterate a filter + */ + public Iterator iterateFilter(Object collection, String filter, QueryParameters queryParameters) throws HibernateException; + + /** + * Get the EntityPersister for any instance + * @param entityName optional entity name + * @param object the entity instance + */ + public EntityPersister getEntityPersister(String entityName, Object object) throws HibernateException; + + /** + * Get the entity instance associated with the given Key, + * calling the Interceptor if necessary + */ + public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException; + + /** + * Notify the session that the transaction completed, so we no longer + * own the old locks. (Also we should release cache softlocks.) May + * be called multiple times during the transaction completion process. + * Also called after an autocommit, in which case the second argument + * is null. 
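The internalLoad() contract described above is what association types typically go through when resolving a foreign key to an entity reference. The following is a minimal sketch of a caller, assuming only the SessionImplementor interface declared in this file; the resolveAssociation() helper is hypothetical, not Hibernate API.

```java
import java.io.Serializable;
import org.hibernate.engine.SessionImplementor;

// Sketch: resolving an association through internalLoad().
public class AssociationResolutionSketch {
	Object resolveAssociation(String entityName, Serializable id, SessionImplementor session) {
		boolean eager = false;    // a proxy may be returned instead of the loaded instance
		boolean nullable = false; // a missing row is an error, not a null reference
		// with nullable=false a new proxy may be created; whether the row
		// actually exists is only checked when the proxy gets initialized
		return session.internalLoad( entityName, id, eager, nullable );
	}
}
```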
+ */ + public void afterTransactionCompletion(boolean successful, Transaction tx); + + /** + * Notify the session that the transaction is about to complete + */ + public void beforeTransactionCompletion(Transaction tx); + + /** + * Return the identifier of the persistent object, or null if + * not associated with the session + */ + public Serializable getContextEntityIdentifier(Object object); + + /** + * The best guess entity name for an entity not in an association + */ + public String bestGuessEntityName(Object object); + + /** + * The guessed entity name for an entity not in an association + */ + public String guessEntityName(Object entity) throws HibernateException; + + /** + * Instantiate the entity class, initializing with the given identifier + */ + public Object instantiate(String entityName, Serializable id) throws HibernateException; + + /** + * Execute an SQL Query + */ + public List listCustomQuery(CustomQuery customQuery, QueryParameters queryParameters) + throws HibernateException; + + /** + * Execute an SQL Query + */ + public ScrollableResults scrollCustomQuery(CustomQuery customQuery, QueryParameters queryParameters) + throws HibernateException; + + /** + * Execute a native SQL query, and return the results as a fully built list. + * + * @param spec The specification of the native SQL query to execute. + * @param queryParameters The parameters by which to perform the execution. + * @return The result list. + * @throws HibernateException + */ + public List list(NativeSQLQuerySpecification spec, QueryParameters queryParameters) + throws HibernateException; + + /** + * Execute a native SQL query, and return the results as a scrollable result. + * + * @param spec The specification of the native SQL query to execute. + * @param queryParameters The parameters by which to perform the execution. + * @return The resulting scrollable result. + * @throws HibernateException + */ + public ScrollableResults scroll(NativeSQLQuerySpecification spec, QueryParameters queryParameters) + throws HibernateException; + + /** + * Retrieve the currently set value for a filter parameter. + * + * @param filterParameterName The filter parameter name in the format + * {FILTER_NAME.PARAMETER_NAME}. + * @return The filter parameter value. + */ + public Object getFilterParameterValue(String filterParameterName); + + /** + * Retrieve the type for a given filter parameter. + * + * @param filterParameterName The filter parameter name in the format + * {FILTER_NAME.PARAMETER_NAME}. + */ + public Type getFilterParameterType(String filterParameterName); + + /** + * Return the currently enabled filters. The filter map is keyed by filter + * name, with values corresponding to the {@link org.hibernate.impl.FilterImpl} + * instance. + * @return The currently enabled filters. + */ + public Map getEnabledFilters(); + + public int getDontFlushFromFind(); + + /** + * Retrieves the configured event listeners from this event source. + * + * @return The configured event listeners.
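Both filter-parameter accessors above take a qualified name in the {FILTER_NAME.PARAMETER_NAME} format. A small sketch of how such a name splits into its two parts; the parsing shown here is illustrative (the real lookup lives in the session implementation and FilterImpl), and the names used are hypothetical.

```java
// Sketch: splitting a qualified filter parameter name.
public class FilterParameterNameSketch {
	public static void main(String[] args) {
		String filterParameterName = "regionFilter.regionCode"; // FILTER_NAME.PARAMETER_NAME
		int dot = filterParameterName.indexOf( '.' );
		if ( dot < 0 ) {
			throw new IllegalArgumentException( "expected FILTER_NAME.PARAMETER_NAME" );
		}
		String filterName = filterParameterName.substring( 0, dot );
		String parameterName = filterParameterName.substring( dot + 1 );
		System.out.println( filterName + " / " + parameterName ); // regionFilter / regionCode
	}
}
```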
+ */ + public EventListeners getListeners(); + + //TODO: temporary + + /** + * Get the persistence context for this session + */ + public PersistenceContext getPersistenceContext(); + + /** + * Execute a HQL update or delete query + */ + int executeUpdate(String query, QueryParameters queryParameters) throws HibernateException; + + /** + * Execute a native SQL update or delete query + */ + int executeNativeUpdate(NativeSQLQuerySpecification specification, QueryParameters queryParameters) throws HibernateException; + + // copied from Session: + + public EntityMode getEntityMode(); + public CacheMode getCacheMode(); + public void setCacheMode(CacheMode cm); + public boolean isOpen(); + public boolean isConnected(); + public FlushMode getFlushMode(); + public void setFlushMode(FlushMode fm); + public Connection connection(); + public void flush(); + + /** + * Get a Query instance for a named query or named native SQL query + */ + public Query getNamedQuery(String name); + /** + * Get a Query instance for a named native SQL query + */ + public Query getNamedSQLQuery(String name); + + public boolean isEventSource(); + + public void afterScrollOperation(); + + public void setFetchProfile(String name); + + public String getFetchProfile(); + + public JDBCContext getJDBCContext(); + + /** + * Determine whether the session is closed. Provided seperately from + * {@link #isOpen()} as this method does not attempt any JTA synch + * registration, where as {@link #isOpen()} does; which makes this one + * nicer to use for most internal purposes. + * + * @return True if the session is closed; false otherwise. + */ + public boolean isClosed(); +} diff --git a/src/org/hibernate/engine/StatefulPersistenceContext.java b/src/org/hibernate/engine/StatefulPersistenceContext.java new file mode 100644 index 0000000000..8f5912a5a8 --- /dev/null +++ b/src/org/hibernate/engine/StatefulPersistenceContext.java @@ -0,0 +1,1366 @@ +// $Id$ +package org.hibernate.engine; + +import java.io.IOException; +import java.io.InvalidObjectException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.collections.ReferenceMap; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.NonUniqueObjectException; +import org.hibernate.PersistentObjectException; +import org.hibernate.TransientObjectException; +import org.hibernate.engine.loading.LoadContexts; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; +import org.hibernate.tuple.ElementWrapper; +import org.hibernate.util.IdentityMap; +import org.hibernate.util.MarkerObject; + +/** + * A PersistenceContext represents the state of persistent "stuff" which + * Hibernate is tracking. This includes persistent entities, collections, + * as well as proxies generated. + *
+ * There is meant to be a one-to-one correspondence between a SessionImpl and + * a PersistenceContext. The SessionImpl uses the PersistenceContext to track + * the current state of its context. Event-listeners then use the + * PersistenceContext to drive their processing. + * + * @author Steve Ebersole + */ + public class StatefulPersistenceContext implements PersistenceContext { + + public static final Object NO_ROW = new MarkerObject( "NO_ROW" ); + + private static final Log log = LogFactory.getLog( StatefulPersistenceContext.class ); + private static final Log PROXY_WARN_LOG = LogFactory.getLog( StatefulPersistenceContext.class.getName() + ".ProxyWarnLog" ); + private static final int INIT_COLL_SIZE = 8; + + private SessionImplementor session; + + // Loaded entity instances, by EntityKey + private Map entitiesByKey; + + // Loaded entity instances, by EntityUniqueKey + private Map entitiesByUniqueKey; + + // Identity map of EntityEntry instances, by the entity instance + private Map entityEntries; + + // Entity proxies, by EntityKey + private Map proxiesByKey; + + // Snapshots of current database state for entities + // that have *not* been loaded + private Map entitySnapshotsByKey; + + // Identity map of array holder ArrayHolder instances, by the array instance + private Map arrayHolders; + + // Identity map of CollectionEntry instances, by the collection wrapper + private Map collectionEntries; + + // Collection wrappers, by the CollectionKey + private Map collectionsByKey; //key=CollectionKey, value=PersistentCollection + + // Set of EntityKeys of deleted objects + private HashSet nullifiableEntityKeys; + + // properties that we have tried to load, and not found in the database + private HashSet nullAssociations; + + // A list of collection wrappers that were instantiated during result set + // processing, that we will need to initialize at the end of the query + private List nonlazyCollections; + + // A container for collections we load up when the owning entity is not + // yet loaded ... for now, this is purely transient! + private Map unownedCollections; + + private int cascading = 0; + private int loadCounter = 0; + private boolean flushing = false; + + private boolean hasNonReadOnlyEntities = false; + + private LoadContexts loadContexts; + private BatchFetchQueue batchFetchQueue; + + + + /** + * Constructs a PersistenceContext, bound to the given session. + * + * @param session The session "owning" this context.
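Several of the maps above are deliberately identity-based (entityEntries, collectionEntries, arrayHolders): two instances that happen to be equals() must still be tracked as separate persistence-context entries. A short sketch of that design choice, using the JDK's IdentityHashMap as a stand-in for Hibernate's own IdentityMap helper.

```java
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;

// Sketch: why entity/collection entries live in identity maps.
public class IdentityMapSketch {
	public static void main(String[] args) {
		String a = new String( "same" );
		String b = new String( "same" ); // equals(a), but a distinct instance

		Map byEquality = new HashMap();
		byEquality.put( a, "entry-A" );
		byEquality.put( b, "entry-B" ); // silently overwrites entry-A

		Map byIdentity = new IdentityHashMap();
		byIdentity.put( a, "entry-A" );
		byIdentity.put( b, "entry-B" ); // kept separate, keyed by reference

		System.out.println( byEquality.size() ); // 1
		System.out.println( byIdentity.size() ); // 2
	}
}
```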
+ */ + public StatefulPersistenceContext(SessionImplementor session) { + this.session = session; + + entitiesByKey = new HashMap( INIT_COLL_SIZE ); + entitiesByUniqueKey = new HashMap( INIT_COLL_SIZE ); + proxiesByKey = new ReferenceMap( ReferenceMap.HARD, ReferenceMap.WEAK ); + entitySnapshotsByKey = new HashMap( INIT_COLL_SIZE ); + + entityEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE ); + collectionEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE ); + collectionsByKey = new HashMap( INIT_COLL_SIZE ); + arrayHolders = IdentityMap.instantiate( INIT_COLL_SIZE ); + + nullifiableEntityKeys = new HashSet(); + + initTransientState(); + } + + private void initTransientState() { + nullAssociations = new HashSet( INIT_COLL_SIZE ); + nonlazyCollections = new ArrayList( INIT_COLL_SIZE ); + } + + public boolean isStateless() { + return false; + } + + public SessionImplementor getSession() { + return session; + } + + public LoadContexts getLoadContexts() { + if ( loadContexts == null ) { + loadContexts = new LoadContexts( this ); + } + return loadContexts; + } + + public void addUnownedCollection(CollectionKey key, PersistentCollection collection) { + if (unownedCollections==null) { + unownedCollections = new HashMap(8); + } + unownedCollections.put(key, collection); + } + + public PersistentCollection useUnownedCollection(CollectionKey key) { + if (unownedCollections==null) { + return null; + } + else { + return (PersistentCollection) unownedCollections.remove(key); + } + } + + /** + * Get the BatchFetchQueue, instantiating one if + * necessary. + */ + public BatchFetchQueue getBatchFetchQueue() { + if (batchFetchQueue==null) { + batchFetchQueue = new BatchFetchQueue(this); + } + return batchFetchQueue; + } + + public void clear() { + Iterator itr = proxiesByKey.values().iterator(); + while ( itr.hasNext() ) { + final LazyInitializer li = ( ( HibernateProxy ) itr.next() ).getHibernateLazyInitializer(); + li.setSession( null ); + } + Map.Entry[] collectionEntryArray = IdentityMap.concurrentEntries( collectionEntries ); + for ( int i = 0; i < collectionEntryArray.length; i++ ) { + ( ( PersistentCollection ) collectionEntryArray[i].getKey() ).unsetSession( getSession() ); + } + arrayHolders.clear(); + entitiesByKey.clear(); + entitiesByUniqueKey.clear(); + entityEntries.clear(); + entitySnapshotsByKey.clear(); + collectionsByKey.clear(); + collectionEntries.clear(); + if ( unownedCollections != null ) { + unownedCollections.clear(); + } + proxiesByKey.clear(); + nullifiableEntityKeys.clear(); + if ( batchFetchQueue != null ) { + batchFetchQueue.clear(); + } + hasNonReadOnlyEntities = false; + if ( loadContexts != null ) { + loadContexts.cleanup(); + } + } + + public boolean hasNonReadOnlyEntities() { + return hasNonReadOnlyEntities; + } + + public void setEntryStatus(EntityEntry entry, Status status) { + entry.setStatus(status); + setHasNonReadOnlyEnties(status); + } + + private void setHasNonReadOnlyEnties(Status status) { + if ( status==Status.DELETED || status==Status.MANAGED || status==Status.SAVING ) { + hasNonReadOnlyEntities = true; + } + } + + public void afterTransactionCompletion() { + // Downgrade locks + Iterator iter = entityEntries.values().iterator(); + while ( iter.hasNext() ) { + ( (EntityEntry) iter.next() ).setLockMode(LockMode.NONE); + } + } + + /** + * Get the current state of the entity as known to the underlying + * database, or null if there is no corresponding row + */ + public Object[] getDatabaseSnapshot(Serializable id, EntityPersister persister) + 
throws HibernateException { + EntityKey key = new EntityKey( id, persister, session.getEntityMode() ); + Object cached = entitySnapshotsByKey.get(key); + if (cached!=null) { + return cached==NO_ROW ? null : (Object[]) cached; + } + else { + Object[] snapshot = persister.getDatabaseSnapshot( id, session ); + entitySnapshotsByKey.put( key, snapshot==null ? NO_ROW : snapshot ); + return snapshot; + } + } + + public Object[] getNaturalIdSnapshot(Serializable id, EntityPersister persister) + throws HibernateException { + if ( !persister.hasNaturalIdentifier() ) { + return null; + } + + // if the natural-id is marked as non-mutable, it is not retrieved during a + // normal database-snapshot operation... + int[] props = persister.getNaturalIdentifierProperties(); + boolean[] updateable = persister.getPropertyUpdateability(); + boolean allNaturalIdPropsAreUpdateable = true; + for ( int i = 0; i < props.length; i++ ) { + if ( !updateable[ props[i] ] ) { + allNaturalIdPropsAreUpdateable = false; + break; + } + } + + if ( allNaturalIdPropsAreUpdateable ) { + // do this when all the properties are updateable since there is + // a certain likelihood that the information will already be + // snapshot-cached. + Object[] entitySnapshot = getDatabaseSnapshot( id, persister ); + if ( entitySnapshot == NO_ROW || entitySnapshot == null ) { + return null; + } + Object[] naturalIdSnapshot = new Object[ props.length ]; + for ( int i = 0; i < props.length; i++ ) { + naturalIdSnapshot[i] = entitySnapshot[ props[i] ]; + } + return naturalIdSnapshot; + } + else { + return persister.getNaturalIdentifierSnapshot( id, session ); + } + } + + /** + * Retrieve the cached database snapshot for the requested entity key. + *
+ * This differs from {@link #getDatabaseSnapshot} in two important respects: + * <ol> + * <li>no snapshot is obtained from the database if not already cached</li> + * <li>an entry of {@link #NO_ROW} here is interpreted as an exception</li> + * </ol>
    + * @param key The entity key for which to retrieve the cached snapshot + * @return The cached snapshot + * @throws IllegalStateException if the cached snapshot was == {@link #NO_ROW}. + */ + public Object[] getCachedDatabaseSnapshot(EntityKey key) { + Object snapshot = entitySnapshotsByKey.get( key ); + if ( snapshot == NO_ROW ) { + throw new IllegalStateException( "persistence context reported no row snapshot for " + MessageHelper.infoString( key.getEntityName(), key.getIdentifier() ) ); + } + return ( Object[] ) snapshot; + } + + /*public void removeDatabaseSnapshot(EntityKey key) { + entitySnapshotsByKey.remove(key); + }*/ + + public void addEntity(EntityKey key, Object entity) { + entitiesByKey.put(key, entity); + getBatchFetchQueue().removeBatchLoadableEntityKey(key); + } + + /** + * Get the entity instance associated with the given + * EntityKey + */ + public Object getEntity(EntityKey key) { + return entitiesByKey.get(key); + } + + public boolean containsEntity(EntityKey key) { + return entitiesByKey.containsKey(key); + } + + /** + * Remove an entity from the session cache, also clear + * up other state associated with the entity, all except + * for the EntityEntry + */ + public Object removeEntity(EntityKey key) { + Object entity = entitiesByKey.remove(key); + Iterator iter = entitiesByUniqueKey.values().iterator(); + while ( iter.hasNext() ) { + if ( iter.next()==entity ) iter.remove(); + } + entitySnapshotsByKey.remove(key); + nullifiableEntityKeys.remove(key); + getBatchFetchQueue().removeBatchLoadableEntityKey(key); + getBatchFetchQueue().removeSubselect(key); + return entity; + } + + /** + * Get an entity cached by unique key + */ + public Object getEntity(EntityUniqueKey euk) { + return entitiesByUniqueKey.get(euk); + } + + /** + * Add an entity to the cache by unique key + */ + public void addEntity(EntityUniqueKey euk, Object entity) { + entitiesByUniqueKey.put(euk, entity); + } + + /** + * Retreive the EntityEntry representation of the given entity. + * + * @param entity The entity for which to locate the EntityEntry. + * @return The EntityEntry for the given entity. + */ + public EntityEntry getEntry(Object entity) { + return (EntityEntry) entityEntries.get(entity); + } + + /** + * Remove an entity entry from the session cache + */ + public EntityEntry removeEntry(Object entity) { + return (EntityEntry) entityEntries.remove(entity); + } + + /** + * Is there an EntityEntry for this instance? + */ + public boolean isEntryFor(Object entity) { + return entityEntries.containsKey(entity); + } + + /** + * Get the collection entry for a persistent collection + */ + public CollectionEntry getCollectionEntry(PersistentCollection coll) { + return (CollectionEntry) collectionEntries.get(coll); + } + + /** + * Adds an entity to the internal caches. + */ + public EntityEntry addEntity( + final Object entity, + final Status status, + final Object[] loadedState, + final EntityKey entityKey, + final Object version, + final LockMode lockMode, + final boolean existsInDatabase, + final EntityPersister persister, + final boolean disableVersionIncrement, + boolean lazyPropertiesAreUnfetched + ) { + + addEntity( entityKey, entity ); + + return addEntry( + entity, + status, + loadedState, + null, + entityKey.getIdentifier(), + version, + lockMode, + existsInDatabase, + persister, + disableVersionIncrement, + lazyPropertiesAreUnfetched + ); + } + + + /** + * Generates an appropriate EntityEntry instance and adds it + * to the event source's internal caches. 
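getDatabaseSnapshot() and getCachedDatabaseSnapshot() above lean on the NO_ROW marker object to distinguish "queried and found no row" from "never queried". A condensed sketch of that marker-object caching pattern; loadFromDatabase() is a hypothetical stand-in for the persister.getDatabaseSnapshot() call.

```java
import java.util.HashMap;
import java.util.Map;

// Sketch: caching "absence" with a sentinel value.
public class SnapshotCacheSketch {
	private static final Object NO_ROW = new Object();
	private final Map snapshotsByKey = new HashMap();

	public Object[] getSnapshot(Object key) {
		Object cached = snapshotsByKey.get( key );
		if ( cached != null ) {
			// a map hit may be a cached snapshot or a cached "no row" result
			return cached == NO_ROW ? null : (Object[]) cached;
		}
		Object[] snapshot = loadFromDatabase( key ); // null when the row does not exist
		snapshotsByKey.put( key, snapshot == null ? NO_ROW : snapshot );
		return snapshot;
	}

	// hypothetical stand-in for the persister call
	private Object[] loadFromDatabase(Object key) {
		return null;
	}
}
```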
+ */ + public EntityEntry addEntry( + final Object entity, + final Status status, + final Object[] loadedState, + final Object rowId, + final Serializable id, + final Object version, + final LockMode lockMode, + final boolean existsInDatabase, + final EntityPersister persister, + final boolean disableVersionIncrement, + boolean lazyPropertiesAreUnfetched) { + + EntityEntry e = new EntityEntry( + status, + loadedState, + rowId, + id, + version, + lockMode, + existsInDatabase, + persister, + session.getEntityMode(), + disableVersionIncrement, + lazyPropertiesAreUnfetched + ); + entityEntries.put(entity, e); + + setHasNonReadOnlyEnties(status); + return e; + } + + public boolean containsCollection(PersistentCollection collection) { + return collectionEntries.containsKey(collection); + } + + public boolean containsProxy(Object entity) { + return proxiesByKey.containsValue( entity ); + } + + /** + * Takes the given object and, if it represents a proxy, reassociates it with this event source. + * + * @param value The possible proxy to be reassociated. + * @return Whether the passed value represented an actual proxy which got initialized. + * @throws MappingException + */ + public boolean reassociateIfUninitializedProxy(Object value) throws MappingException { + if ( value instanceof ElementWrapper ) { + value = ( (ElementWrapper) value ).getElement(); + } + + if ( !Hibernate.isInitialized(value) ) { + HibernateProxy proxy = (HibernateProxy) value; + LazyInitializer li = proxy.getHibernateLazyInitializer(); + reassociateProxy(li, proxy); + return true; + } + else { + return false; + } + } + + /** + * If a deleted entity instance is re-saved, and it has a proxy, we need to + * reset the identifier of the proxy + */ + public void reassociateProxy(Object value, Serializable id) throws MappingException { + if ( value instanceof ElementWrapper ) { + value = ( (ElementWrapper) value ).getElement(); + } + + if ( value instanceof HibernateProxy ) { + if ( log.isDebugEnabled() ) log.debug("setting proxy identifier: " + id); + HibernateProxy proxy = (HibernateProxy) value; + LazyInitializer li = proxy.getHibernateLazyInitializer(); + li.setIdentifier(id); + reassociateProxy(li, proxy); + } + } + + /** + * Associate a proxy that was instantiated by another session with this session + * + * @param li The proxy initializer. + * @param proxy The proxy to reassociate. + */ + private void reassociateProxy(LazyInitializer li, HibernateProxy proxy) { + if ( li.getSession() != this.getSession() ) { + EntityPersister persister = session.getFactory().getEntityPersister( li.getEntityName() ); + EntityKey key = new EntityKey( li.getIdentifier(), persister, session.getEntityMode() ); + // any earlier proxy takes precedence + if ( !proxiesByKey.containsKey( key ) ) { + proxiesByKey.put( key, proxy ); + } + proxy.getHibernateLazyInitializer().setSession( session ); + } + } + + /** + * Get the entity instance underlying the given proxy, throwing + * an exception if the proxy is uninitialized. If the given object + * is not a proxy, simply return the argument. 
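The unproxy() contract documented just above condenses to a few lines; this sketch shows the same unwrapping pattern via the HibernateProxy/LazyInitializer API used throughout this class, substituting an IllegalStateException for the PersistentObjectException the real method throws (and omitting the ElementWrapper handling).

```java
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;

// Sketch: unwrapping a possibly-proxied entity reference.
public class UnproxySketch {
	public static Object unwrap(Object maybeProxy) {
		if ( maybeProxy instanceof HibernateProxy ) {
			LazyInitializer li = ( (HibernateProxy) maybeProxy ).getHibernateLazyInitializer();
			if ( li.isUninitialized() ) {
				// unproxy() raises PersistentObjectException here; callers that
				// want the proxy initialized use unproxyAndReassociate() instead
				throw new IllegalStateException( "uninitialized proxy for " + li.getEntityName() );
			}
			return li.getImplementation(); // the underlying entity instance
		}
		return maybeProxy; // not a proxy: return the argument as-is
	}
}
```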
+ */ + public Object unproxy(Object maybeProxy) throws HibernateException { + if ( maybeProxy instanceof ElementWrapper ) { + maybeProxy = ( (ElementWrapper) maybeProxy ).getElement(); + } + + if ( maybeProxy instanceof HibernateProxy ) { + HibernateProxy proxy = (HibernateProxy) maybeProxy; + LazyInitializer li = proxy.getHibernateLazyInitializer(); + if ( li.isUninitialized() ) { + throw new PersistentObjectException( + "object was an uninitialized proxy for " + + li.getEntityName() + ); + } + return li.getImplementation(); //unwrap the object + } + else { + return maybeProxy; + } + } + + /** + * Possibly unproxy the given reference and reassociate it with the current session. + * + * @param maybeProxy The reference to be unproxied if it currently represents a proxy. + * @return The unproxied instance. + * @throws HibernateException + */ + public Object unproxyAndReassociate(Object maybeProxy) throws HibernateException { + if ( maybeProxy instanceof ElementWrapper ) { + maybeProxy = ( (ElementWrapper) maybeProxy ).getElement(); + } + + if ( maybeProxy instanceof HibernateProxy ) { + HibernateProxy proxy = (HibernateProxy) maybeProxy; + LazyInitializer li = proxy.getHibernateLazyInitializer(); + reassociateProxy(li, proxy); + return li.getImplementation(); //initialize + unwrap the object + } + else { + return maybeProxy; + } + } + + /** + * Attempts to check whether the given key represents an entity already loaded within the + * current session. + * @param object The entity reference against which to perform the uniqueness check. + * @throws HibernateException + */ + public void checkUniqueness(EntityKey key, Object object) throws HibernateException { + Object entity = getEntity(key); + if ( entity == object ) { + throw new AssertionFailure( "object already associated, but no entry was found" ); + } + if ( entity != null ) { + throw new NonUniqueObjectException( key.getIdentifier(), key.getEntityName() ); + } + } + + /** + * If the existing proxy is insufficiently "narrow" (derived), instantiate a new proxy + * and overwrite the registration of the old one. This breaks == and occurs only for + * "class" proxies rather than "interface" proxies. Also init the proxy to point to + * the given target implementation if necessary. + * + * @param proxy The proxy instance to be narrowed. + * @param persister The persister for the proxied entity. + * @param key The internal cache key for the proxied entity. + * @param object (optional) the actual proxied entity instance. + * @return An appropriately narrowed instance. 
+ * @throws HibernateException + */ + public Object narrowProxy(Object proxy, EntityPersister persister, EntityKey key, Object object) + throws HibernateException { + + boolean alreadyNarrow = persister.getConcreteProxyClass( session.getEntityMode() ) + .isAssignableFrom( proxy.getClass() ); + + if ( !alreadyNarrow ) { + if ( PROXY_WARN_LOG.isWarnEnabled() ) { + PROXY_WARN_LOG.warn( + "Narrowing proxy to " + + persister.getConcreteProxyClass( session.getEntityMode() ) + + " - this operation breaks ==" + ); + } + + if ( object != null ) { + proxiesByKey.remove(key); + return object; //return the proxied object + } + else { + proxy = persister.createProxy( key.getIdentifier(), session ); + proxiesByKey.put(key, proxy); //overwrite old proxy + return proxy; + } + + } + else { + + if ( object != null ) { + LazyInitializer li = ( (HibernateProxy) proxy ).getHibernateLazyInitializer(); + li.setImplementation(object); + } + + return proxy; + + } + + } + + /** + * Return the existing proxy associated with the given EntityKey, or the + * third argument (the entity associated with the key) if no proxy exists. Init + * the proxy to the target implementation, if necessary. + */ + public Object proxyFor(EntityPersister persister, EntityKey key, Object impl) + throws HibernateException { + if ( !persister.hasProxy() ) return impl; + Object proxy = proxiesByKey.get(key); + if ( proxy != null ) { + return narrowProxy(proxy, persister, key, impl); + } + else { + return impl; + } + } + + /** + * Return the existing proxy associated with the given EntityKey, or the + * argument (the entity associated with the key) if no proxy exists. + * (slower than the form above) + */ + public Object proxyFor(Object impl) throws HibernateException { + EntityEntry e = getEntry(impl); + EntityPersister p = e.getPersister(); + return proxyFor( p, new EntityKey( e.getId(), p, session.getEntityMode() ), impl ); + } + + /** + * Get the entity that owns this persistent collection + */ + public Object getCollectionOwner(Serializable key, CollectionPersister collectionPersister) throws MappingException { + return getEntity( new EntityKey( key, collectionPersister.getOwnerEntityPersister(), session.getEntityMode() ) ); + } + + /** + * add a collection we just loaded up (still needs initializing) + */ + public void addUninitializedCollection(CollectionPersister persister, PersistentCollection collection, Serializable id) { + CollectionEntry ce = new CollectionEntry(collection, persister, id, flushing); + addCollection(collection, ce, id); + } + + /** + * add a detached uninitialized collection + */ + public void addUninitializedDetachedCollection(CollectionPersister persister, PersistentCollection collection) { + CollectionEntry ce = new CollectionEntry( persister, collection.getKey() ); + addCollection( collection, ce, collection.getKey() ); + } + + /** + * Add a new collection (ie. a newly created one, just instantiated by the + * application, with no database state or snapshot) + * @param collection The collection to be associated with the persistence context + */ + public void addNewCollection(CollectionPersister persister, PersistentCollection collection) + throws HibernateException { + addCollection(collection, persister); + } + + /** + * Add an collection to the cache, with a given collection entry. + * + * @param coll The collection for which we are adding an entry. + * @param entry The entry representing the collection. + * @param key The key of the collection's entry. 
+ */ + private void addCollection(PersistentCollection coll, CollectionEntry entry, Serializable key) { + collectionEntries.put( coll, entry ); + CollectionKey collectionKey = new CollectionKey( entry.getLoadedPersister(), key, session.getEntityMode() ); + PersistentCollection old = ( PersistentCollection ) collectionsByKey.put( collectionKey, coll ); + if ( old != null ) { + if ( old == coll ) { + throw new AssertionFailure("bug adding collection twice"); + } + // or should it actually throw an exception? + old.unsetSession( session ); + collectionEntries.remove( old ); + // watch out for a case where old is still referenced + // somewhere in the object graph! (which is a user error) + } + } + + /** + * Add a collection to the cache, creating a new collection entry for it + * + * @param collection The collection for which we are adding an entry. + * @param persister The collection persister + */ + private void addCollection(PersistentCollection collection, CollectionPersister persister) { + CollectionEntry ce = new CollectionEntry( persister, collection ); + collectionEntries.put( collection, ce ); + } + + /** + * add an (initialized) collection that was created by another session and passed + * into update() (ie. one with a snapshot and existing state on the database) + */ + public void addInitializedDetachedCollection(CollectionPersister collectionPersister, PersistentCollection collection) + throws HibernateException { + if ( collection.isUnreferenced() ) { + //treat it just like a new collection + addCollection( collection, collectionPersister ); + } + else { + CollectionEntry ce = new CollectionEntry( collection, session.getFactory() ); + addCollection( collection, ce, collection.getKey() ); + } + } + + /** + * add a collection we just pulled out of the cache (does not need initializing) + */ + public CollectionEntry addInitializedCollection(CollectionPersister persister, PersistentCollection collection, Serializable id) + throws HibernateException { + CollectionEntry ce = new CollectionEntry(collection, persister, id, flushing); + ce.postInitialize(collection); + addCollection(collection, ce, id); + return ce; + } + + /** + * Get the collection instance associated with the CollectionKey + */ + public PersistentCollection getCollection(CollectionKey collectionKey) { + return (PersistentCollection) collectionsByKey.get(collectionKey); + } + + /** + * Register a collection for non-lazy loading at the end of the + * two-phase load + */ + public void addNonLazyCollection(PersistentCollection collection) { + nonlazyCollections.add(collection); + } + + /** + * Force initialization of all non-lazy collections encountered during + * the current two-phase load (actually, this is a no-op, unless this + * is the "outermost" load) + */ + public void initializeNonLazyCollections() throws HibernateException { + if ( loadCounter == 0 ) { + log.debug( "initializing non-lazy collections" ); + //do this work only at the very highest level of the load + loadCounter++; //don't let this method be called recursively + try { + int size; + while ( ( size = nonlazyCollections.size() ) > 0 ) { + //note that each iteration of the loop may add new elements + ( (PersistentCollection) nonlazyCollections.remove( size - 1 ) ).forceInitialization(); + } + } + finally { + loadCounter--; + clearNullProperties(); + } + } + } + + + /** + * Get the PersistentCollection object for an array + */ + public PersistentCollection getCollectionHolder(Object array) { + return (PersistentCollection) arrayHolders.get(array); + } + 
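initializeNonLazyCollections() above drains a worklist that can grow while it is being processed (initializing one collection may register further non-lazy collections), guarded by a counter so only the outermost load runs the loop. A generic sketch of that pattern, with Runnable standing in for the collection initializations.

```java
import java.util.ArrayList;
import java.util.List;

// Sketch: draining a self-growing worklist exactly once, at the outermost level.
public class NonLazyDrainSketch {
	private final List worklist = new ArrayList();
	private int loadCounter = 0;

	public void register(Runnable initialization) {
		worklist.add( initialization );
	}

	public void drain() {
		if ( loadCounter == 0 ) { // only the outermost caller drains
			loadCounter++;        // keep nested calls from re-entering
			try {
				int size;
				while ( ( size = worklist.size() ) > 0 ) {
					// each run() may call register() and grow the list
					( (Runnable) worklist.remove( size - 1 ) ).run();
				}
			}
			finally {
				loadCounter--;
			}
		}
	}
}
```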
+ /** + * Register a PersistentCollection object for an array. + * Associates a holder with an array - MUST be called after loading + * array, since the array instance is not created until endLoad(). + */ + public void addCollectionHolder(PersistentCollection holder) { + //TODO:refactor + make this method private + arrayHolders.put( holder.getValue(), holder ); + } + + public PersistentCollection removeCollectionHolder(Object array) { + return (PersistentCollection) arrayHolders.remove(array); + } + + /** + * Get the snapshot of the pre-flush collection state + */ + public Serializable getSnapshot(PersistentCollection coll) { + return getCollectionEntry(coll).getSnapshot(); + } + + /** + * Get the collection entry for a collection passed to filter, + * which might be a collection wrapper, an array, or an unwrapped + * collection. Return null if there is no entry. + */ + public CollectionEntry getCollectionEntryOrNull(Object collection) { + PersistentCollection coll; + if ( collection instanceof PersistentCollection ) { + coll = (PersistentCollection) collection; + //if (collection==null) throw new TransientObjectException("Collection was not yet persistent"); + } + else { + coll = getCollectionHolder(collection); + if ( coll == null ) { + //it might be an unwrapped collection reference! + //try to find a wrapper (slowish) + Iterator wrappers = IdentityMap.keyIterator(collectionEntries); + while ( wrappers.hasNext() ) { + PersistentCollection pc = (PersistentCollection) wrappers.next(); + if ( pc.isWrapper(collection) ) { + coll = pc; + break; + } + } + } + } + + return (coll == null) ? null : getCollectionEntry(coll); + } + + /** + * Get an existing proxy by key + */ + public Object getProxy(EntityKey key) { + return proxiesByKey.get(key); + } + + /** + * Add a proxy to the session cache + */ + public void addProxy(EntityKey key, Object proxy) { + proxiesByKey.put(key, proxy); + } + + /** + * Remove a proxy from the session cache. + *
    + * Additionally, ensure that any load optimization references + * such as batch or subselect loading get cleaned up as well. + * + * @param key The key of the entity proxy to be removed + * @return The proxy reference. + */ + public Object removeProxy(EntityKey key) { + if ( batchFetchQueue != null ) { + batchFetchQueue.removeBatchLoadableEntityKey( key ); + batchFetchQueue.removeSubselect( key ); + } + return proxiesByKey.remove( key ); + } + + /** + * Record the fact that an entity does not exist in the database + * + * @param key the primary key of the entity + */ + /*public void addNonExistantEntityKey(EntityKey key) { + nonExistantEntityKeys.add(key); + }*/ + + /** + * Record the fact that an entity does not exist in the database + * + * @param key a unique key of the entity + */ + /*public void addNonExistantEntityUniqueKey(EntityUniqueKey key) { + nonExistentEntityUniqueKeys.add(key); + }*/ + + /*public void removeNonExist(EntityKey key) { + nonExistantEntityKeys.remove(key); + }*/ + + /** + * Retrieve the set of EntityKeys representing nullifiable references + */ + public HashSet getNullifiableEntityKeys() { + return nullifiableEntityKeys; + } + + public Map getEntitiesByKey() { + return entitiesByKey; + } + + public Map getEntityEntries() { + return entityEntries; + } + + public Map getCollectionEntries() { + return collectionEntries; + } + + public Map getCollectionsByKey() { + return collectionsByKey; + } + + /** + * Do we already know that the entity does not exist in the + * database? + */ + /*public boolean isNonExistant(EntityKey key) { + return nonExistantEntityKeys.contains(key); + }*/ + + /** + * Do we already know that the entity does not exist in the + * database? + */ + /*public boolean isNonExistant(EntityUniqueKey key) { + return nonExistentEntityUniqueKeys.contains(key); + }*/ + + public int getCascadeLevel() { + return cascading; + } + + public int incrementCascadeLevel() { + return ++cascading; + } + + public int decrementCascadeLevel() { + return --cascading; + } + + public boolean isFlushing() { + return flushing; + } + + public void setFlushing(boolean flushing) { + this.flushing = flushing; + } + + /** + * Call this before begining a two-phase load + */ + public void beforeLoad() { + loadCounter++; + } + + /** + * Call this after finishing a two-phase load + */ + public void afterLoad() { + loadCounter--; + } + + /** + * Returns a string representation of the object. + * + * @return a string representation of the object. + */ + public String toString() { + return new StringBuffer() + .append("PersistenceContext[entityKeys=") + .append(entitiesByKey.keySet()) + .append(",collectionKeys=") + .append(collectionsByKey.keySet()) + .append("]") + .toString(); + } + + /** + * Search the persistence context for an owner for the child object, + * given a collection role. If mergeMap is non-null, also + * check the detached graph being merged for a parent. + */ + public Serializable getOwnerId(String entity, String property, Object childEntity, Map mergeMap) { + + EntityPersister persister = session.getFactory() + .getEntityPersister(entity); + final CollectionPersister collectionPersister = session.getFactory() + .getCollectionPersister(entity + '.' 
+ property); + + Iterator entities = entityEntries.entrySet().iterator(); + while ( entities.hasNext() ) { + Map.Entry me = (Map.Entry) entities.next(); + EntityEntry ee = (EntityEntry) me.getValue(); + if ( persister.isSubclassEntityName( ee.getEntityName() ) ) { + Object instance = me.getKey(); + + //check if the managed object is the parent + boolean found = isFoundInParent( + property, + childEntity, + persister, + collectionPersister, + instance + ); + + if (!found && mergeMap!=null) { + //check if the detached object being merged is the parent + Object unmergedInstance = mergeMap.get(instance); + Object unmergedChild = mergeMap.get(childEntity); + if ( unmergedInstance!=null && unmergedChild!=null ) { + found = isFoundInParent( + property, + unmergedChild, + persister, + collectionPersister, + unmergedInstance + ); + } + } + + if ( found ) { + return ee.getId(); + } + + } + } + return null; + } + + private boolean isFoundInParent( + String property, + Object childEntity, + EntityPersister persister, + CollectionPersister collectionPersister, + Object potentialParent + ) { + Object collection = persister.getPropertyValue( + potentialParent, + property, + session.getEntityMode() + ); + return collection!=null && Hibernate.isInitialized(collection) && + collectionPersister.getCollectionType() + .contains(collection, childEntity, session); + } + + /** + * Search the persistence context for an index of the child object, + * given a collection role + */ + public Object getIndexInOwner(String entity, String property, Object childEntity, Map mergeMap) { + + EntityPersister persister = session.getFactory() + .getEntityPersister(entity); + CollectionPersister cp = session.getFactory() + .getCollectionPersister(entity + '.' + property); + Iterator entities = entityEntries.entrySet().iterator(); + while ( entities.hasNext() ) { + Map.Entry me = (Map.Entry) entities.next(); + EntityEntry ee = (EntityEntry) me.getValue(); + if ( persister.isSubclassEntityName( ee.getEntityName() ) ) { + Object instance = me.getKey(); + + Object index = getIndexInParent(property, childEntity, persister, cp, instance); + + if (index==null && mergeMap!=null) { + Object unmergedInstance = mergeMap.get(instance); + Object unmergedChild = mergeMap.get(childEntity); + if ( unmergedInstance!=null && unmergedChild!=null ) { + index = getIndexInParent(property, unmergedChild, persister, cp, unmergedInstance); + } + } + + if (index!=null) return index; + } + } + return null; + } + + private Object getIndexInParent( + String property, + Object childEntity, + EntityPersister persister, + CollectionPersister collectionPersister, + Object potentialParent + ){ + Object collection = persister.getPropertyValue( potentialParent, property, session.getEntityMode() ); + if ( collection!=null && Hibernate.isInitialized(collection) ) { + return collectionPersister.getCollectionType().indexOf(collection, childEntity); + } + else { + return null; + } + } + + /** + * Record the fact that the association belonging to the keyed + * entity is null. + */ + public void addNullProperty(EntityKey ownerKey, String propertyName) { + nullAssociations.add( new AssociationKey(ownerKey, propertyName) ); + } + + /** + * Is the association property belonging to the keyed entity null? 
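addNullProperty()/isPropertyNull() above record null-valued associations as (owner, property) pairs in a HashSet for the duration of a load. A minimal sketch of that bookkeeping; the String-keyed Key class is a stand-in for Hibernate's AssociationKey, which wraps an EntityKey.

```java
import java.io.Serializable;
import java.util.HashSet;

// Sketch: tracking which associations resolved to null during a load.
public class NullAssociationSketch {
	static final class Key implements Serializable {
		private final String ownerKey;
		private final String propertyName;
		Key(String ownerKey, String propertyName) {
			this.ownerKey = ownerKey;
			this.propertyName = propertyName;
		}
		public boolean equals(Object other) {
			if ( !( other instanceof Key ) ) return false;
			Key that = (Key) other;
			return ownerKey.equals( that.ownerKey ) && propertyName.equals( that.propertyName );
		}
		public int hashCode() {
			return ownerKey.hashCode() * 31 + propertyName.hashCode();
		}
	}

	private final HashSet nullAssociations = new HashSet();

	public void addNullProperty(String ownerKey, String propertyName) {
		nullAssociations.add( new Key( ownerKey, propertyName ) );
	}
	public boolean isPropertyNull(String ownerKey, String propertyName) {
		return nullAssociations.contains( new Key( ownerKey, propertyName ) );
	}
}
```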
+ */ + public boolean isPropertyNull(EntityKey ownerKey, String propertyName) { + return nullAssociations.contains( new AssociationKey(ownerKey, propertyName) ); + } + + private void clearNullProperties() { + nullAssociations.clear(); + } + + public void setReadOnly(Object entity, boolean readOnly) { + EntityEntry entry = getEntry(entity); + if (entry==null) { + throw new TransientObjectException("Instance was not associated with the session"); + } + entry.setReadOnly(readOnly, entity); + hasNonReadOnlyEntities = hasNonReadOnlyEntities || !readOnly; + } + + public void replaceDelayedEntityIdentityInsertKeys(EntityKey oldKey, Serializable generatedId) { + Object entity = entitiesByKey.remove( oldKey ); + EntityEntry oldEntry = ( EntityEntry ) entityEntries.remove( entity ); + + EntityKey newKey = new EntityKey( generatedId, oldEntry.getPersister(), getSession().getEntityMode() ); + addEntity( newKey, entity ); + addEntry( + entity, + oldEntry.getStatus(), + oldEntry.getLoadedState(), + oldEntry.getRowId(), + generatedId, + oldEntry.getVersion(), + oldEntry.getLockMode(), + oldEntry.isExistsInDatabase(), + oldEntry.getPersister(), + oldEntry.isBeingReplicated(), + oldEntry.isLoadedWithLazyPropertiesUnfetched() + ); + } + + /** + * Used by the owning session to explicitly control serialization of the + * persistence context. + * + * @param oos The stream to which the persistence context should get written + * @throws IOException serialization errors. + */ + public void serialize(ObjectOutputStream oos) throws IOException { + log.trace( "serializing persistent-context" ); + + oos.writeBoolean( hasNonReadOnlyEntities ); + + oos.writeInt( entitiesByKey.size() ); + log.trace( "starting serialization of [" + entitiesByKey.size() + "] entitiesByKey entries" ); + Iterator itr = entitiesByKey.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + ( ( EntityKey ) entry.getKey() ).serialize( oos ); + oos.writeObject( entry.getValue() ); + } + + oos.writeInt( entitiesByUniqueKey.size() ); + log.trace( "starting serialization of [" + entitiesByUniqueKey.size() + "] entitiesByUniqueKey entries" ); + itr = entitiesByUniqueKey.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + ( ( EntityUniqueKey ) entry.getKey() ).serialize( oos ); + oos.writeObject( entry.getValue() ); + } + + oos.writeInt( proxiesByKey.size() ); + log.trace( "starting serialization of [" + proxiesByKey.size() + "] proxiesByKey entries" ); + itr = proxiesByKey.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + ( ( EntityKey ) entry.getKey() ).serialize( oos ); + oos.writeObject( entry.getValue() ); + } + + oos.writeInt( entitySnapshotsByKey.size() ); + log.trace( "starting serialization of [" + entitySnapshotsByKey.size() + "] entitySnapshotsByKey entries" ); + itr = entitySnapshotsByKey.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + ( ( EntityKey ) entry.getKey() ).serialize( oos ); + oos.writeObject( entry.getValue() ); + } + + oos.writeInt( entityEntries.size() ); + log.trace( "starting serialization of [" + entityEntries.size() + "] entityEntries entries" ); + itr = entityEntries.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + oos.writeObject( entry.getKey() ); + ( ( EntityEntry ) entry.getValue() ).serialize( oos ); + } + + oos.writeInt( collectionsByKey.size() ); + log.trace( "starting 
serialization of [" + collectionsByKey.size() + "] collectionsByKey entries" ); + itr = collectionsByKey.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + ( ( CollectionKey ) entry.getKey() ).serialize( oos ); + oos.writeObject( entry.getValue() ); + } + + oos.writeInt( collectionEntries.size() ); + log.trace( "starting serialization of [" + collectionEntries.size() + "] collectionEntries entries" ); + itr = collectionEntries.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + oos.writeObject( entry.getKey() ); + ( ( CollectionEntry ) entry.getValue() ).serialize( oos ); + } + + oos.writeInt( arrayHolders.size() ); + log.trace( "starting serialization of [" + arrayHolders.size() + "] arrayHolders entries" ); + itr = arrayHolders.entrySet().iterator(); + while ( itr.hasNext() ) { + Map.Entry entry = ( Map.Entry ) itr.next(); + oos.writeObject( entry.getKey() ); + oos.writeObject( entry.getValue() ); + } + + oos.writeInt( nullifiableEntityKeys.size() ); + log.trace( "starting serialization of [" + nullifiableEntityKeys.size() + "] nullifiableEntityKeys entries" ); + itr = nullifiableEntityKeys.iterator(); + while ( itr.hasNext() ) { + EntityKey entry = ( EntityKey ) itr.next(); + entry.serialize( oos ); + } + } + + public static StatefulPersistenceContext deserialize( + ObjectInputStream ois, + SessionImplementor session) throws IOException, ClassNotFoundException { + log.trace( "deserializing persistent-context" ); + StatefulPersistenceContext rtn = new StatefulPersistenceContext( session ); + + // during deserialization, we need to reconnect all proxies and + // collections to this session, as well as the EntityEntry and + // CollectionEntry instances; these associations are transient + // because serialization is used for different things. + + try { + // todo : we can actually just determine this from the incoming EntityEntry-s + rtn.hasNonReadOnlyEntities = ois.readBoolean(); + + int count = ois.readInt(); + log.trace( "staring deserialization of [" + count + "] entitiesByKey entries" ); + rtn.entitiesByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); + for ( int i = 0; i < count; i++ ) { + rtn.entitiesByKey.put( EntityKey.deserialize( ois, session ), ois.readObject() ); + } + + count = ois.readInt(); + log.trace( "staring deserialization of [" + count + "] entitiesByUniqueKey entries" ); + rtn.entitiesByUniqueKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); + for ( int i = 0; i < count; i++ ) { + rtn.entitiesByUniqueKey.put( EntityUniqueKey.deserialize( ois, session ), ois.readObject() ); + } + + count = ois.readInt(); + log.trace( "staring deserialization of [" + count + "] proxiesByKey entries" ); + rtn.proxiesByKey = new ReferenceMap( ReferenceMap.HARD, ReferenceMap.WEAK, count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f ); + for ( int i = 0; i < count; i++ ) { + EntityKey ek = EntityKey.deserialize( ois, session ); + Object proxy = ois.readObject(); + if ( proxy instanceof HibernateProxy ) { + ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().setSession( session ); + rtn.proxiesByKey.put( ek, proxy ); + } + else { + log.trace( "encountered prunded proxy" ); + } + // otherwise, the proxy was pruned during the serialization process + } + + count = ois.readInt(); + log.trace( "staring deserialization of [" + count + "] entitySnapshotsByKey entries" ); + rtn.entitySnapshotsByKey = new HashMap( count < INIT_COLL_SIZE ? 
INIT_COLL_SIZE : count ); + for ( int i = 0; i < count; i++ ) { + rtn.entitySnapshotsByKey.put( EntityKey.deserialize( ois, session ), ois.readObject() ); + } + + count = ois.readInt(); + log.trace( "starting deserialization of [" + count + "] entityEntries entries" ); + rtn.entityEntries = IdentityMap.instantiateSequenced( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); + for ( int i = 0; i < count; i++ ) { + Object entity = ois.readObject(); + EntityEntry entry = EntityEntry.deserialize( ois, session ); + rtn.entityEntries.put( entity, entry ); + } + + count = ois.readInt(); + log.trace( "starting deserialization of [" + count + "] collectionsByKey entries" ); + rtn.collectionsByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); + for ( int i = 0; i < count; i++ ) { + rtn.collectionsByKey.put( CollectionKey.deserialize( ois, session ), ois.readObject() ); + } + + count = ois.readInt(); + log.trace( "starting deserialization of [" + count + "] collectionEntries entries" ); + rtn.collectionEntries = IdentityMap.instantiateSequenced( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); + for ( int i = 0; i < count; i++ ) { + final PersistentCollection pc = ( PersistentCollection ) ois.readObject(); + final CollectionEntry ce = CollectionEntry.deserialize( ois, session ); + pc.setCurrentSession( session ); + rtn.collectionEntries.put( pc, ce ); + } + + count = ois.readInt(); + log.trace( "starting deserialization of [" + count + "] arrayHolders entries" ); + rtn.arrayHolders = IdentityMap.instantiate( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); + for ( int i = 0; i < count; i++ ) { + rtn.arrayHolders.put( ois.readObject(), ois.readObject() ); + } + + count = ois.readInt(); + log.trace( "starting deserialization of [" + count + "] nullifiableEntityKeys entries" ); + rtn.nullifiableEntityKeys = new HashSet(); + for ( int i = 0; i < count; i++ ) { + rtn.nullifiableEntityKeys.add( EntityKey.deserialize( ois, session ) ); + } + + } + catch ( HibernateException he ) { + throw new InvalidObjectException( he.getMessage() ); + } + + return rtn; + } +} diff --git a/src/org/hibernate/engine/Status.java b/src/org/hibernate/engine/Status.java new file mode 100644 index 0000000000..4f009a3db7 --- /dev/null +++ b/src/org/hibernate/engine/Status.java @@ -0,0 +1,46 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.io.ObjectStreamException; +import java.io.InvalidObjectException; + +/** + * Represents the status of an entity with respect to + * this session. These statuses are for internal + * book-keeping only and are not intended to represent + * any notion that is visible to the application.
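The Status class declared just below is a pre-Java 5 typesafe enum, the same idiom used by CacheMode and ValueInclusion in this change; readResolve() is what keeps identity (==) comparisons valid after deserialization. A minimal, self-contained sketch of the idiom with a hypothetical Mode class:

```java
import java.io.*;

public class TypesafeEnumSketch {
    static final class Mode implements Serializable {
        static final Mode ON = new Mode( "ON" );
        static final Mode OFF = new Mode( "OFF" );
        private final String name;
        private Mode(String name) { this.name = name; }
        // without this, readObject() would return a brand-new instance and == would break
        private Object readResolve() throws ObjectStreamException {
            if ( name.equals( ON.name ) ) return ON;
            if ( name.equals( OFF.name ) ) return OFF;
            throw new InvalidObjectException( "unknown Mode: " + name );
        }
    }

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream( buf );
        oos.writeObject( Mode.ON );
        oos.flush();
        Object copy = new ObjectInputStream(
                new ByteArrayInputStream( buf.toByteArray() ) ).readObject();
        System.out.println( copy == Mode.ON ); // true, thanks to readResolve()
    }
}
```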
+ */ +public final class Status implements Serializable { + + public static final Status MANAGED = new Status( "MANAGED" ); + public static final Status READ_ONLY = new Status( "READ_ONLY" ); + public static final Status DELETED = new Status( "DELETED" ); + public static final Status GONE = new Status( "GONE" ); + public static final Status LOADING = new Status( "LOADING" ); + public static final Status SAVING = new Status( "SAVING" ); + + private String name; + + private Status(String name) { + this.name = name; + } + + public String toString() { + return name; + } + + private Object readResolve() throws ObjectStreamException { + return parse( name ); + } + + public static Status parse(String name) throws InvalidObjectException { + if ( name.equals(MANAGED.name) ) return MANAGED; + if ( name.equals(READ_ONLY.name) ) return READ_ONLY; + if ( name.equals(DELETED.name) ) return DELETED; + if ( name.equals(GONE.name) ) return GONE; + if ( name.equals(LOADING.name) ) return LOADING; + if ( name.equals(SAVING.name) ) return SAVING; + throw new InvalidObjectException( "invalid Status" ); + } +} diff --git a/src/org/hibernate/engine/SubselectFetch.java b/src/org/hibernate/engine/SubselectFetch.java new file mode 100755 index 0000000000..76bc587ac7 --- /dev/null +++ b/src/org/hibernate/engine/SubselectFetch.java @@ -0,0 +1,78 @@ +//$Id$ +package org.hibernate.engine; + +import java.util.Map; +import java.util.Set; + +import org.hibernate.persister.entity.Loadable; +import org.hibernate.persister.entity.PropertyMapping; +import org.hibernate.util.StringHelper; + +/** + * @author Gavin King + */ +public class SubselectFetch { + private final Set resultingEntityKeys; + private final String queryString; + private final String alias; + private final Loadable loadable; + private final QueryParameters queryParameters; + private final Map namedParameterLocMap; + + public SubselectFetch( + //final String queryString, + final String alias, + final Loadable loadable, + final QueryParameters queryParameters, + final Set resultingEntityKeys, + final Map namedParameterLocMap + ) { + this.resultingEntityKeys = resultingEntityKeys; + this.queryParameters = queryParameters; + this.namedParameterLocMap = namedParameterLocMap; + this.loadable = loadable; + this.alias = alias; + + //TODO: ugly here: + final String queryString = queryParameters.getFilteredSQL(); + int fromIndex = queryString.indexOf(" from "); + int orderByIndex = queryString.lastIndexOf("order by"); + this.queryString = orderByIndex>0 ? + queryString.substring(fromIndex, orderByIndex) : + queryString.substring(fromIndex); + + } + + public QueryParameters getQueryParameters() { + return queryParameters; + } + + /** + * Get the Set of EntityKeys + */ + public Set getResult() { + return resultingEntityKeys; + } + + public String toSubselectString(String ukname) { + + String[] joinColumns = ukname==null ? 
+ StringHelper.qualify( alias, loadable.getIdentifierColumnNames() ) : + ( (PropertyMapping) loadable ).toColumns(alias, ukname); + + return new StringBuffer() + .append("select ") + .append( StringHelper.join(", ", joinColumns) ) + .append(queryString) + .toString(); + } + + public String toString() { + return "SubselectFetch(" + queryString + ')'; + } + + public Map getNamedParameterLocMap() { + return namedParameterLocMap; + } + +} diff --git a/src/org/hibernate/engine/TransactionHelper.java b/src/org/hibernate/engine/TransactionHelper.java new file mode 100644 index 0000000000..0ae612bdf7 --- /dev/null +++ b/src/org/hibernate/engine/TransactionHelper.java @@ -0,0 +1,54 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.sql.Connection; +import java.sql.SQLException; + +import org.hibernate.HibernateException; +import org.hibernate.engine.transaction.IsolatedWork; +import org.hibernate.engine.transaction.Isolater; +import org.hibernate.exception.JDBCExceptionHelper; + +/** + * Allows work to be done outside the current transaction, by suspending it, + * and performing work in a new transaction + * + * @author Emmanuel Bernard + */ +public abstract class TransactionHelper { + + // todo : remove this and just have subclasses use Isolater/IsolatedWork directly... + + /** + * The work to be done + */ + protected abstract Serializable doWorkInCurrentTransaction(Connection conn, String sql) throws SQLException; + + /** + * Suspend the current transaction and perform work in a new transaction + */ + public Serializable doWorkInNewTransaction(final SessionImplementor session) + throws HibernateException { + class Work implements IsolatedWork { + Serializable generatedValue; + public void doWork(Connection connection) throws HibernateException { + String sql = null; + try { + generatedValue = doWorkInCurrentTransaction( connection, sql ); + } + catch( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not get or update next value", + sql + ); + } + } + } + Work work = new Work(); + Isolater.doIsolatedWork( work, session ); + return work.generatedValue; + } +} diff --git a/src/org/hibernate/engine/TwoPhaseLoad.java b/src/org/hibernate/engine/TwoPhaseLoad.java new file mode 100755 index 0000000000..18424c6ef2 --- /dev/null +++ b/src/org/hibernate/engine/TwoPhaseLoad.java @@ -0,0 +1,276 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.CacheMode; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.cache.CacheKey; +import org.hibernate.cache.entry.CacheEntry; +import org.hibernate.event.PostLoadEvent; +import org.hibernate.event.PostLoadEventListener; +import org.hibernate.event.PreLoadEvent; +import org.hibernate.event.PreLoadEventListener; +import org.hibernate.intercept.LazyPropertyInitializer; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.property.BackrefPropertyAccessor; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; + +/** + * Functionality relating to Hibernate's two-phase loading process, + * that may be reused by persisters that do not use the Loader + * framework + * + * @author Gavin King + */ +public final class TwoPhaseLoad { + + private static final Log log = 
LogFactory.getLog(TwoPhaseLoad.class); + + private TwoPhaseLoad() {} + + /** + * Register the "hydrated" state of an entity instance, after the first step of 2-phase loading. + * + * Add the "hydrated state" (an array) of an uninitialized entity to the session. We don't try + * to resolve any associations yet, because there might be other entities waiting to be + * read from the JDBC result set we are currently processing + */ + public static void postHydrate( + final EntityPersister persister, + final Serializable id, + final Object[] values, + final Object rowId, + final Object object, + final LockMode lockMode, + final boolean lazyPropertiesAreUnfetched, + final SessionImplementor session) + throws HibernateException { + + Object version = Versioning.getVersion(values, persister); + session.getPersistenceContext().addEntry( + object, + Status.LOADING, + values, + rowId, + id, + version, + lockMode, + true, + persister, + false, + lazyPropertiesAreUnfetched + ); + + if ( log.isTraceEnabled() && version!=null ) { + String versionStr = persister.isVersioned() + ? persister.getVersionType().toLoggableString( version, session.getFactory() ) + : "null"; + log.trace( "Version: " + versionStr ); + } + + } + + /** + * Perform the second step of 2-phase load. Fully initialize the entity + * instance. + * + * After processing a JDBC result set, we "resolve" all the associations + * between the entities which were instantiated and had their state + * "hydrated" into an array + */ + public static void initializeEntity( + final Object entity, + final boolean readOnly, + final SessionImplementor session, + final PreLoadEvent preLoadEvent, + final PostLoadEvent postLoadEvent) throws HibernateException { + + //TODO: Should this be an InitializeEntityEventListener??? (watch out for performance!) + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + EntityEntry entityEntry = persistenceContext.getEntry(entity); + if ( entityEntry == null ) { + throw new AssertionFailure( "possible non-threadsafe access to the session" ); + } + EntityPersister persister = entityEntry.getPersister(); + Serializable id = entityEntry.getId(); + Object[] hydratedState = entityEntry.getLoadedState(); + + if ( log.isDebugEnabled() ) + log.debug( + "resolving associations for " + + MessageHelper.infoString(persister, id, session.getFactory()) + ); + + Type[] types = persister.getPropertyTypes(); + for ( int i = 0; i < hydratedState.length; i++ ) { + final Object value = hydratedState[i]; + if ( value!=LazyPropertyInitializer.UNFETCHED_PROPERTY && value!=BackrefPropertyAccessor.UNKNOWN ) { + hydratedState[i] = types[i].resolve( value, session, entity ); + } + } + + //Must occur after resolving identifiers! 
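The loop just above is the essence of phase two: each raw "hydrated" value is resolved into its object form only after the whole result set has been read, so associations between rows can be wired up. A toy illustration of that split, using a hypothetical ToyType in place of Hibernate's Type:

```java
import java.util.HashMap;
import java.util.Map;

public class TwoPhaseSketch {
    // stand-in for org.hibernate.type.Type#resolve
    interface ToyType {
        Object resolve(Object hydrated, Map loadedEntities);
    }

    // an "entity type": hydrated form is the foreign key, resolved form is the instance
    static final ToyType ENTITY = new ToyType() {
        public Object resolve(Object hydrated, Map loadedEntities) {
            return loadedEntities.get( hydrated );
        }
    };

    // a "basic type": hydrated and resolved forms are identical
    static final ToyType BASIC = new ToyType() {
        public Object resolve(Object hydrated, Map loadedEntities) {
            return hydrated;
        }
    };

    public static void main(String[] args) {
        Map loadedEntities = new HashMap();
        loadedEntities.put( new Long( 7 ), "Customer#7" ); // loaded from an earlier row

        ToyType[] types = { BASIC, ENTITY };
        Object[] hydratedState = { "order-123", new Long( 7 ) }; // phase one: raw column values
        for ( int i = 0; i < hydratedState.length; i++ ) {
            hydratedState[i] = types[i].resolve( hydratedState[i], loadedEntities );
        }
        System.out.println( java.util.Arrays.toString( hydratedState ) ); // [order-123, Customer#7]
    }
}
```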
+ if ( session.isEventSource() ) { + preLoadEvent.setEntity(entity).setState(hydratedState).setId(id).setPersister(persister); + PreLoadEventListener[] listeners = session.getListeners().getPreLoadEventListeners(); + for ( int i = 0; i < listeners.length; i++ ) { + listeners[i].onPreLoad(preLoadEvent); + } + } + + persister.setPropertyValues( entity, hydratedState, session.getEntityMode() ); + + final SessionFactoryImplementor factory = session.getFactory(); + if ( persister.hasCache() && session.getCacheMode().isPutEnabled() ) { + + if ( log.isDebugEnabled() ) + log.debug( + "adding entity to second-level cache: " + + MessageHelper.infoString( persister, id, session.getFactory() ) + ); + + Object version = Versioning.getVersion(hydratedState, persister); + CacheEntry entry = new CacheEntry( + hydratedState, + persister, + entityEntry.isLoadedWithLazyPropertiesUnfetched(), + version, + session, + entity + ); + CacheKey cacheKey = new CacheKey( + id, + persister.getIdentifierType(), + persister.getRootEntityName(), + session.getEntityMode(), + session.getFactory() + ); + boolean put = persister.getCache().put( + cacheKey, + persister.getCacheEntryStructure().structure(entry), + session.getTimestamp(), + version, + persister.isVersioned() ? + persister.getVersionType().getComparator() : + null, + useMinimalPuts(session, entityEntry) + ); //we could use persister.hasLazyProperties() instead of true + + if ( put && factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().secondLevelCachePut( persister.getCache().getRegionName() ); + } + } + + if ( readOnly || !persister.isMutable() ) { + //no need to take a snapshot - this is a + //performance optimization, but not really + //important, except for entities with huge + //mutable property values + persistenceContext.setEntryStatus(entityEntry, Status.READ_ONLY); + } + else { + //take a snapshot + TypeFactory.deepCopy( + hydratedState, + persister.getPropertyTypes(), + persister.getPropertyUpdateability(), + hydratedState, //after setting values to object, entityMode + session + ); + persistenceContext.setEntryStatus(entityEntry, Status.MANAGED); + } + + persister.afterInitialize( + entity, + entityEntry.isLoadedWithLazyPropertiesUnfetched(), + session + ); + + if ( session.isEventSource() ) { + postLoadEvent.setEntity(entity).setId(id).setPersister(persister); + PostLoadEventListener[] listeners = session.getListeners().getPostLoadEventListeners(); + for ( int i = 0; i < listeners.length; i++ ) { + listeners[i].onPostLoad(postLoadEvent); + } + } + + if ( log.isDebugEnabled() ) + log.debug( + "done materializing entity " + + MessageHelper.infoString( persister, id, session.getFactory() ) + ); + + if ( factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().loadEntity( persister.getEntityName() ); + } + + } + + private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) { + return ( session.getFactory().getSettings().isMinimalPutsEnabled() && + session.getCacheMode()!=CacheMode.REFRESH ) || + ( entityEntry.getPersister().hasLazyProperties() && + entityEntry.isLoadedWithLazyPropertiesUnfetched() && + entityEntry.getPersister().isLazyPropertiesCacheable() ); + } + + /** + * Add an uninitialized instance of an entity class, as a placeholder to ensure object + * identity. Must be called before postHydrate(). + * + * Create a "temporary" entry for a newly instantiated entity. 
The entity is uninitialized, + * but we need the mapping from id to instance in order to guarantee uniqueness. + */ + public static void addUninitializedEntity( + final EntityKey key, + final Object object, + final EntityPersister persister, + final LockMode lockMode, + final boolean lazyPropertiesAreUnfetched, + final SessionImplementor session + ) { + session.getPersistenceContext().addEntity( + object, + Status.LOADING, + null, + key, + null, + lockMode, + true, + persister, + false, + lazyPropertiesAreUnfetched + ); + } + + public static void addUninitializedCachedEntity( + final EntityKey key, + final Object object, + final EntityPersister persister, + final LockMode lockMode, + final boolean lazyPropertiesAreUnfetched, + final Object version, + final SessionImplementor session + ) { + session.getPersistenceContext().addEntity( + object, + Status.LOADING, + null, + key, + version, + lockMode, + true, + persister, + false, + lazyPropertiesAreUnfetched + ); + } +} diff --git a/src/org/hibernate/engine/TypedValue.java b/src/org/hibernate/engine/TypedValue.java new file mode 100644 index 0000000000..4155f8ccca --- /dev/null +++ b/src/org/hibernate/engine/TypedValue.java @@ -0,0 +1,60 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; + +import org.hibernate.EntityMode; +import org.hibernate.type.Type; + +/** + * An ordered pair of a value and its Hibernate type. + * + * @see org.hibernate.type.Type + * @author Gavin King + */ +public final class TypedValue implements Serializable { + private final Type type; + private final Object value; + private final EntityMode entityMode; + + public TypedValue(Type type, Object value, EntityMode entityMode) { + this.type = type; + this.value=value; + this.entityMode = entityMode; + } + + public Object getValue() { + return value; + } + + public Type getType() { + return type; + } + + public String toString() { + return value==null ? "null" : value.toString(); + } + + public int hashCode() { + //int result = 17; + //result = 37 * result + type.hashCode(); + //result = 37 * result + ( value==null ? 0 : value.hashCode() ); + //return result; + return value==null ? 
0 : type.getHashCode(value, entityMode); + } + + public boolean equals(Object other) { + if ( !(other instanceof TypedValue) ) return false; + TypedValue that = (TypedValue) other; + /*return that.type.equals(type) && + EqualsHelper.equals(that.value, value);*/ + return type.getReturnedClass() == that.type.getReturnedClass() && + type.isEqual(that.value, value, entityMode); + } + +} + + + + + diff --git a/src/org/hibernate/engine/UnsavedValueFactory.java b/src/org/hibernate/engine/UnsavedValueFactory.java new file mode 100755 index 0000000000..85df899cf9 --- /dev/null +++ b/src/org/hibernate/engine/UnsavedValueFactory.java @@ -0,0 +1,115 @@ +//$Id$ +package org.hibernate.engine; + +import java.io.Serializable; +import java.lang.reflect.Constructor; + +import org.hibernate.InstantiationException; +import org.hibernate.MappingException; +import org.hibernate.property.Getter; +import org.hibernate.type.IdentifierType; +import org.hibernate.type.PrimitiveType; +import org.hibernate.type.Type; +import org.hibernate.type.VersionType; + +/** + * @author Gavin King + */ +public class UnsavedValueFactory { + + private static Object instantiate(Constructor constructor) { + try { + return constructor.newInstance(null); + } + catch (Exception e) { + throw new InstantiationException( "could not instantiate test object", constructor.getDeclaringClass(), e ); + } + } + + /** + * Return an IdentifierValue for the specified unsaved-value. If none is specified, + * guess the unsaved value by instantiating a test instance of the class and + * reading its id property, or if that is not possible, using the Java default + * value for the type + */ + public static IdentifierValue getUnsavedIdentifierValue( + String unsavedValue, + Getter identifierGetter, + Type identifierType, + Constructor constructor) { + + if ( unsavedValue == null ) { + if ( identifierGetter!=null && constructor!=null ) { + // use the id value of a newly instantiated instance as the unsaved-value + Serializable defaultValue = (Serializable) identifierGetter.get( instantiate(constructor) ); + return new IdentifierValue( defaultValue ); + } + else if ( identifierGetter != null && (identifierType instanceof PrimitiveType) ) { + Serializable defaultValue = ( ( PrimitiveType ) identifierType ).getDefaultValue(); + return new IdentifierValue( defaultValue ); + } + else { + return IdentifierValue.NULL; + } + } + else if ( "null".equals( unsavedValue ) ) { + return IdentifierValue.NULL; + } + else if ( "undefined".equals( unsavedValue ) ) { + return IdentifierValue.UNDEFINED; + } + else if ( "none".equals( unsavedValue ) ) { + return IdentifierValue.NONE; + } + else if ( "any".equals( unsavedValue ) ) { + return IdentifierValue.ANY; + } + else { + try { + return new IdentifierValue( ( Serializable ) ( ( IdentifierType ) identifierType ).stringToObject( unsavedValue ) ); + } + catch ( ClassCastException cce ) { + throw new MappingException( "Bad identifier type: " + identifierType.getName() ); + } + catch ( Exception e ) { + throw new MappingException( "Could not parse identifier unsaved-value: " + unsavedValue ); + } + } + } + + public static VersionValue getUnsavedVersionValue( + String versionUnsavedValue, + Getter versionGetter, + VersionType versionType, + Constructor constructor) { + + if ( versionUnsavedValue == null ) { + if ( constructor!=null ) { + Object defaultValue = versionGetter.get( instantiate(constructor) ); + // if the version of a newly instantiated object is not the same + // as the version seed value, use that as the
unsaved-value + return versionType.isEqual( versionType.seed( null ), defaultValue ) ? + VersionValue.UNDEFINED : + new VersionValue( defaultValue ); + } + else { + return VersionValue.UNDEFINED; + } + } + else if ( "undefined".equals( versionUnsavedValue ) ) { + return VersionValue.UNDEFINED; + } + else if ( "null".equals( versionUnsavedValue ) ) { + return VersionValue.NULL; + } + else if ( "negative".equals( versionUnsavedValue ) ) { + return VersionValue.NEGATIVE; + } + else { + // this should not happen since the DTD prevents it + throw new MappingException( "Could not parse version unsaved-value: " + versionUnsavedValue ); + } + + } + +} diff --git a/src/org/hibernate/engine/ValueInclusion.java b/src/org/hibernate/engine/ValueInclusion.java new file mode 100644 index 0000000000..64f1d6677d --- /dev/null +++ b/src/org/hibernate/engine/ValueInclusion.java @@ -0,0 +1,51 @@ +package org.hibernate.engine; + +import java.io.Serializable; +import java.io.ObjectStreamException; +import java.io.StreamCorruptedException; + +/** + * An enum of the different ways a value might be "included". + *
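Looking back at getUnsavedVersionValue() above: when no unsaved-value is mapped, it instantiates the class through its no-arg constructor and compares the fresh version field against the seed. A simplified, self-contained sketch of that probing, with a hypothetical Doc class and the seed hard-coded to 0 in place of versionType.seed(null):

```java
import java.lang.reflect.Constructor;

public class DefaultVersionProbeSketch {
    public static class Doc {
        private Integer version; // stays null until first save
        public Integer getVersion() { return version; }
    }

    public static void main(String[] args) throws Exception {
        Constructor ctor = Doc.class.getConstructor( new Class[0] );
        Doc fresh = (Doc) ctor.newInstance( new Object[0] );
        Integer seed = new Integer( 0 ); // stand-in for versionType.seed(null)
        // the fresh instance's version (null) differs from the seed, so a null
        // version can safely be interpreted as "unsaved" for this class
        System.out.println( fresh.getVersion() == null && !seed.equals( fresh.getVersion() ) ); // true
    }
}
```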
<p/>
+ * This is really an expanded true/false notion with "PARTIAL" being the + * expansion. PARTIAL deals with components in the cases where + * parts of the referenced component might define inclusion, but the + * component overall does not. + * + * @author Steve Ebersole + */ +public class ValueInclusion implements Serializable { + + public static final ValueInclusion NONE = new ValueInclusion( "none" ); + public static final ValueInclusion FULL = new ValueInclusion( "full" ); + public static final ValueInclusion PARTIAL = new ValueInclusion( "partial" ); + + private final String name; + + public ValueInclusion(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public String toString() { + return "ValueInclusion[" + name + "]"; + } + + private Object readResolve() throws ObjectStreamException { + if ( name.equals( NONE.name ) ) { + return NONE; + } + else if ( name.equals( FULL.name ) ) { + return FULL; + } + else if ( name.equals( PARTIAL.name ) ) { + return PARTIAL; + } + else { + throw new StreamCorruptedException( "unrecognized value inclusion [" + name + "]" ); + } + } +} diff --git a/src/org/hibernate/engine/VersionValue.java b/src/org/hibernate/engine/VersionValue.java new file mode 100755 index 0000000000..4e0ef3d333 --- /dev/null +++ b/src/org/hibernate/engine/VersionValue.java @@ -0,0 +1,109 @@ +//$Id$ +package org.hibernate.engine; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.MappingException; +import org.hibernate.id.IdentifierGeneratorFactory; + +/** + * A strategy for determining if a version value is a version of + * a new transient instance or a previously persistent transient instance. + * The strategy is determined by the unsaved-value attribute in + * the mapping file. + * + * @author Gavin King + */ +public class VersionValue { + + private static final Log log = LogFactory.getLog(VersionValue.class); + + private final Object value; + /** + * Assume the transient instance is newly instantiated if the version + * is null, otherwise assume it is a detached instance. + */ + public static final VersionValue NULL = new VersionValue() { + public final Boolean isUnsaved(Object version) { + log.trace("version unsaved-value strategy NULL"); + return version==null ? Boolean.TRUE : Boolean.FALSE; + } + public Object getDefaultValue(Object currentValue) { + return null; + } + public String toString() { + return "VERSION_SAVE_NULL"; + } + }; + /** + * Assume the transient instance is newly instantiated if the version + * is null, otherwise defer to the identifier unsaved-value. + */ + public static final VersionValue UNDEFINED = new VersionValue() { + public final Boolean isUnsaved(Object version) { + log.trace("version unsaved-value strategy UNDEFINED"); + return version==null ? Boolean.TRUE : null; + } + public Object getDefaultValue(Object currentValue) { + return currentValue; + } + public String toString() { + return "VERSION_UNDEFINED"; + } + }; + /** + * Assume the transient instance is newly instantiated if the version + * is negative, otherwise assume it is a detached instance. + */ + public static final VersionValue NEGATIVE = new VersionValue() { + + public final Boolean isUnsaved(Object version) throws MappingException { + log.trace("version unsaved-value strategy NEGATIVE"); + if (version==null) return Boolean.TRUE; + if (version instanceof Number) { + return ( (Number) version ).longValue() < 0L ?
Boolean.TRUE : Boolean.FALSE; + } + else { + throw new MappingException("unsaved-value NEGATIVE may only be used with short, int and long types"); + } + } + public Object getDefaultValue(Object currentValue) { + return IdentifierGeneratorFactory.createNumber( -1L, currentValue.getClass() ); + } + public String toString() { + return "VERSION_NEGATIVE"; + } + }; + + protected VersionValue() { + this.value = null; + } + + /** + * Assume the transient instance is newly instantiated if + * its version is null or equal to value + * @param value value to compare to + */ + public VersionValue(Object value) { + this.value = value; + } + + /** + * Does the given version belong to a new instance? + * + * @param version version to check + * @return true is unsaved, false is saved, null is undefined + */ + public Boolean isUnsaved(Object version) throws MappingException { + if ( log.isTraceEnabled() ) log.trace("version unsaved-value: " + value); + return version==null || version.equals(value) ? Boolean.TRUE : Boolean.FALSE; + } + + public Object getDefaultValue(Object currentValue) { + return value; + } + + public String toString() { + return "version unsaved-value: " + value; + } +} \ No newline at end of file diff --git a/src/org/hibernate/engine/Versioning.java b/src/org/hibernate/engine/Versioning.java new file mode 100644 index 0000000000..5152fc89c4 --- /dev/null +++ b/src/org/hibernate/engine/Versioning.java @@ -0,0 +1,172 @@ +//$Id$ +package org.hibernate.engine; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.VersionType; + +/** + * Utilities for dealing with optimistic locking values. + * + * @author Gavin King + */ +public final class Versioning { + /** + * Apply no optimistic locking + */ + public static final int OPTIMISTIC_LOCK_NONE = -1; + + /** + * Apply optimistic locking based on the defined version or timestamp + * property. + */ + public static final int OPTIMISTIC_LOCK_VERSION = 0; + + /** + * Apply optimistic locking based on a current vs. snapshot comparison + * of all properties. + */ + public static final int OPTIMISTIC_LOCK_ALL = 2; + + /** + * Apply optimistic locking based on a current vs. snapshot comparison + * of dirty properties. + */ + public static final int OPTIMISTIC_LOCK_DIRTY = 1; + + private static final Log log = LogFactory.getLog( Versioning.class ); + + /** + * Private constructor disallowing instantiation. + */ + private Versioning() {} + + /** + * Create an initial optimistic locking value according to the {@link VersionType} + * contract for the version property. + * + * @param versionType The version type. + * @param session The originating session + * @return The initial optimistic locking value + */ + private static Object seed(VersionType versionType, SessionImplementor session) { + Object seed = versionType.seed( session ); + if ( log.isTraceEnabled() ) log.trace("Seeding: " + seed); + return seed; + } + + /** + * Create an initial optimistic locking value according to the {@link VersionType} + * contract for the version property if required and inject it into + * the snapshot state. + * + * @param fields The current snapshot state + * @param versionProperty The index of the version property + * @param versionType The version type + * @param session The originating session + * @return True if we injected a new version value into the fields array; false + * otherwise.
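Before the implementation below, a worked sketch of the seeding rule just documented; the seed is hard-coded to Integer 0 here, where the real method delegates to VersionType.seed(session):

```java
public class SeedVersionSketch {
    static boolean seedVersion(Object[] fields, int versionProperty) {
        Object initialVersion = fields[versionProperty];
        // seed when the version is missing, or negative (unsaved-value="negative" convention)
        boolean needsSeeding = initialVersion == null
                || ( initialVersion instanceof Number && ( (Number) initialVersion ).longValue() < 0 );
        if ( needsSeeding ) {
            fields[versionProperty] = new Integer( 0 ); // stand-in for versionType.seed(session)
            return true;
        }
        return false;
    }

    public static void main(String[] args) {
        Object[] transientState = { "some name", null };
        System.out.println( seedVersion( transientState, 1 ) ); // true  -> version was injected
        System.out.println( transientState[1] );                // 0

        Object[] detachedState = { "other name", new Integer( 4 ) };
        System.out.println( seedVersion( detachedState, 1 ) );  // false -> existing version kept
    }
}
```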
+ */ + public static boolean seedVersion( + Object[] fields, + int versionProperty, + VersionType versionType, + SessionImplementor session) { + Object initialVersion = fields[versionProperty]; + if ( + initialVersion==null || + // This next bit is to allow for both unsaved-value="negative" + // and for "older" behavior where version number did not get + // seeded if it was already set in the object + // TODO: shift it into unsaved-value strategy + ( (initialVersion instanceof Number) && ( (Number) initialVersion ).longValue()<0 ) + ) { + fields[versionProperty] = seed( versionType, session ); + return true; + } + else { + if ( log.isTraceEnabled() ) { + log.trace( "using initial version: " + initialVersion ); + } + return false; + } + } + + + /** + * Generate the next increment in the optimistic locking value according to + * the {@link VersionType} contract for the version property. + * + * @param version The current version + * @param versionType The version type + * @param session The originating session + * @return The incremented optimistic locking value. + */ + public static Object increment(Object version, VersionType versionType, SessionImplementor session) { + Object next = versionType.next( version, session ); + if ( log.isTraceEnabled() ) { + log.trace( + "Incrementing: " + + versionType.toLoggableString( version, session.getFactory() ) + + " to " + + versionType.toLoggableString( next, session.getFactory() ) + ); + } + return next; + } + + /** + * Inject the optimistic locking value into the entity state snapshot. + * + * @param fields The state snapshot + * @param version The optimistic locking value + * @param persister The entity persister + */ + public static void setVersion(Object[] fields, Object version, EntityPersister persister) { + if ( !persister.isVersioned() ) { + return; + } + fields[ persister.getVersionProperty() ] = version; + } + + /** + * Extract the optimistic locking value out of the entity state snapshot. + * + * @param fields The state snapshot + * @param persister The entity persister + * @return The extracted optimistic locking value + */ + public static Object getVersion(Object[] fields, EntityPersister persister) { + if ( !persister.isVersioned() ) { + return null; + } + return fields[ persister.getVersionProperty() ]; + } + + /** + * Do we need to increment the version number, given the dirty properties? + * + * @param dirtyProperties The array of property indexes which were deemed dirty + * @param hasDirtyCollections Were any collections found to be dirty (structurally changed) + * @param propertyVersionability An array indicating versionability of each property. + * @return True if a version increment is required; false otherwise.
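The rule documented above can be exercised directly once this class is on the classpath; a small demonstration, assuming a hypothetical entity whose property at index 1 was mapped with optimistic-lock="false":

```java
import org.hibernate.engine.Versioning;

public class VersionIncrementDemo {
    public static void main(String[] args) {
        // property 0 is versionable, property 1 opted out of optimistic locking
        boolean[] propertyVersionability = { true, false };

        // only the non-versionable property is dirty: no increment
        System.out.println( Versioning.isVersionIncrementRequired( new int[] { 1 }, false, propertyVersionability ) ); // false
        // a versionable property is dirty: increment
        System.out.println( Versioning.isVersionIncrementRequired( new int[] { 0 }, false, propertyVersionability ) ); // true
        // a collection changed structurally: always increment
        System.out.println( Versioning.isVersionIncrementRequired( new int[] {}, true, propertyVersionability ) );     // true
    }
}
```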
+ */ + public static boolean isVersionIncrementRequired( + final int[] dirtyProperties, + final boolean hasDirtyCollections, + final boolean[] propertyVersionability) { + if ( hasDirtyCollections ) { + return true; + } + for ( int i = 0; i < dirtyProperties.length; i++ ) { + if ( propertyVersionability[ dirtyProperties[i] ] ) { + return true; + } + } + return false; + } + + +} diff --git a/src/org/hibernate/engine/loading/CollectionLoadContext.java b/src/org/hibernate/engine/loading/CollectionLoadContext.java new file mode 100644 index 0000000000..4e1dd59b17 --- /dev/null +++ b/src/org/hibernate/engine/loading/CollectionLoadContext.java @@ -0,0 +1,339 @@ +package org.hibernate.engine.loading; + +import java.sql.ResultSet; +import java.io.Serializable; +import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.Set; +import java.util.HashSet; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.EntityMode; +import org.hibernate.CacheMode; +import org.hibernate.cache.entry.CollectionCacheEntry; +import org.hibernate.cache.CacheKey; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.engine.CollectionKey; +import org.hibernate.engine.Status; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.CollectionEntry; +import org.hibernate.engine.SessionFactoryImplementor; + +/** + * Represents state associated with the processing of a given {@link ResultSet} + * with regard to loading collections. + *
<p/>
    + * Another implementation option to consider is to not expose {@link ResultSet}s + * directly (in the JDBC redesign) but to always "wrap" them and apply a + * [series of] context[s] to that wrapper. + * + * @author Steve Ebersole + */ +public class CollectionLoadContext { + private static final Log log = LogFactory.getLog( CollectionLoadContext.class ); + + private final LoadContexts loadContexts; + private final ResultSet resultSet; + private Set localLoadingCollectionKeys = new HashSet(); + + /** + * Creates a collection load context for the given result set. + * + * @param loadContexts Callback to other collection load contexts. + * @param resultSet The result set this is "wrapping". + */ + public CollectionLoadContext(LoadContexts loadContexts, ResultSet resultSet) { + this.loadContexts = loadContexts; + this.resultSet = resultSet; + } + + public ResultSet getResultSet() { + return resultSet; + } + + public LoadContexts getLoadContext() { + return loadContexts; + } + + /** + * Retrieve the collection that is being loaded as part of processing this + * result set. + *
<p/>
+ * Basically, there are two valid return values from this method:<ul>
+ * <li>an instance of {@link PersistentCollection} which indicates to + * continue loading the result set row data into that returned collection + * instance; this may be either an instance already associated and in the + * midst of being loaded, or a newly instantiated instance as a matching + * associated collection was not found.</li>
+ * <li><i>null</i> indicates to ignore the corresponding result set row + * data relating to the requested collection; this indicates that either + * the collection was found to already be associated with the persistence + * context in a fully loaded state, or it was found in a loading state + * associated with another result set processing context.</li>
+ * </ul>
    + * + * @param persister The persister for the collection being requested. + * @param key The key of the collection being requested. + * + * @return The loading collection (see discussion above). + */ + public PersistentCollection getLoadingCollection(final CollectionPersister persister, final Serializable key) { + final EntityMode em = loadContexts.getPersistenceContext().getSession().getEntityMode(); + final CollectionKey collectionKey = new CollectionKey( persister, key, em ); + if ( log.isTraceEnabled() ) { + log.trace( "starting attempt to find loading collection [" + MessageHelper.collectionInfoString( persister.getRole(), key ) + "]" ); + } + final LoadingCollectionEntry loadingCollectionEntry = loadContexts.locateLoadingCollectionEntry( collectionKey ); + if ( loadingCollectionEntry == null ) { + // look for existing collection as part of the persistence context + PersistentCollection collection = loadContexts.getPersistenceContext().getCollection( collectionKey ); + if ( collection != null ) { + if ( collection.wasInitialized() ) { + log.trace( "collection already initialized; ignoring" ); + return null; // ignore this row of results! Note the early exit + } + else { + // initialize this collection + log.trace( "collection not yet initialized; initializing" ); + } + } + else { + Object owner = loadContexts.getPersistenceContext().getCollectionOwner( key, persister ); + final boolean newlySavedEntity = owner != null + && loadContexts.getPersistenceContext().getEntry( owner ).getStatus() != Status.LOADING + && em != EntityMode.DOM4J; + if ( newlySavedEntity ) { + // important, to account for newly saved entities in query + // todo : some kind of check for new status... + log.trace( "owning entity already loaded; ignoring" ); + return null; + } + else { + // create one + if ( log.isTraceEnabled() ) { + log.trace( "instantiating new collection [key=" + key + ", rs=" + resultSet + "]" ); + } + collection = persister.getCollectionType() + .instantiate( loadContexts.getPersistenceContext().getSession(), persister, key ); + } + } + collection.beforeInitialize( persister, -1 ); + collection.beginRead(); + localLoadingCollectionKeys.add( collectionKey ); + loadContexts.registerLoadingCollectionXRef( collectionKey, new LoadingCollectionEntry( resultSet, persister, key, collection ) ); + return collection; + } + else { + if ( loadingCollectionEntry.getResultSet() == resultSet ) { + log.trace( "found loading collection bound to current result set processing; reading row" ); + return loadingCollectionEntry.getCollection(); + } + else { + // ignore this row, the collection is in process of + // being loaded somewhere further "up" the stack + log.trace( "collection is already being initialized; ignoring row" ); + return null; + } + } + } + + /** + * Finish the process of collection-loading for this bound result set. Mainly this + * involves cleaning up resources and notifying the collections that loading is + * complete. + * + * @param persister The persister for which to complete loading. 
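The comment inside the method below explains why matches are first copied out: PersistentCollection#endRead can re-enter this code and mutate the key set mid-iteration. A self-contained sketch of the same collect-then-process pattern:

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

public class DeferredProcessingSketch {
    public static void main(String[] args) {
        Set loadingKeys = new HashSet();
        loadingKeys.add( "a" );
        loadingKeys.add( "b" );
        loadingKeys.add( "c" );

        // pass one: pull matches out through the iterator (safe removal)
        List matches = new ArrayList();
        Iterator iter = loadingKeys.iterator();
        while ( iter.hasNext() ) {
            String key = (String) iter.next();
            if ( !key.equals( "b" ) ) { // pretend "a" and "c" belong to this result set
                matches.add( key );
                iter.remove();
            }
        }

        // pass two: run the (possibly re-entrant) completion logic off the copy
        for ( int i = 0; i < matches.size(); i++ ) {
            System.out.println( "completing load for " + matches.get( i ) );
        }
        System.out.println( "still loading elsewhere: " + loadingKeys ); // [b]
    }
}
```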
+ */ + public void endLoadingCollections(CollectionPersister persister) { + SessionImplementor session = getLoadContext().getPersistenceContext().getSession(); + if ( !loadContexts.hasLoadingCollectionEntries() + || localLoadingCollectionKeys.isEmpty() ) { + return; + } + + // in an effort to avoid concurrent-modification-exceptions (from + // potential recursive calls back through here as a result of the + // eventual call to PersistentCollection#endRead), we scan the + // internal loadingCollections map for matches and store those matches + // in a temp collection. the temp collection is then used to "drive" + // the #endRead processing. + List matches = null; + Iterator iter = localLoadingCollectionKeys.iterator(); + while ( iter.hasNext() ) { + final CollectionKey collectionKey = (CollectionKey) iter.next(); + final LoadingCollectionEntry lce = loadContexts.locateLoadingCollectionEntry( collectionKey ); + if ( lce == null) { + log.warn( "In CollectionLoadContext#endLoadingCollections, localLoadingCollectionKeys contained [" + collectionKey + "], but no LoadingCollectionEntry was found in loadContexts" ); + } + else if ( lce.getResultSet() == resultSet && lce.getPersister() == persister ) { + if ( matches == null ) { + matches = new ArrayList(); + } + matches.add( lce ); + if ( lce.getCollection().getOwner() == null ) { + session.getPersistenceContext().addUnownedCollection( + new CollectionKey( persister, lce.getKey(), session.getEntityMode() ), + lce.getCollection() + ); + } + if ( log.isTraceEnabled() ) { + log.trace( "removing collection load entry [" + lce + "]" ); + } + + // todo : i'd much rather have this done from #endLoadingCollection(CollectionPersister,LoadingCollectionEntry)... + loadContexts.unregisterLoadingCollectionXRef( collectionKey ); + iter.remove(); + } + } + + endLoadingCollections( persister, matches ); + if ( localLoadingCollectionKeys.isEmpty() ) { + // todo : hack!!! + // NOTE : here we cleanup the load context when we have no more local + // LCE entries. This "works" for the time being because really + // only the collection load contexts are implemented. Long term, + // this cleanup should become part of the "close result set" + // processing from the (sandbox/jdbc) jdbc-container code. + loadContexts.cleanup( resultSet ); + } + } + + private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) { + if ( matchedCollectionEntries == null ) { + if ( log.isDebugEnabled() ) { + log.debug( "no collections were found in result set for role: " + persister.getRole() ); + } + return; + } + + final int count = matchedCollectionEntries.size(); + if ( log.isDebugEnabled() ) { + log.debug( count + " collections were found in result set for role: " + persister.getRole() ); + } + + for ( int i = 0; i < count; i++ ) { + LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i ); + endLoadingCollection( lce, persister ); + } + + if ( log.isDebugEnabled() ) { + log.debug( count + " collections initialized for role: " + persister.getRole() ); + } + } + + private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) { + if ( log.isTraceEnabled() ) { + log.trace( "ending loading collection [" + lce + "]" ); + } + final SessionImplementor session = getLoadContext().getPersistenceContext().getSession(); + final EntityMode em = session.getEntityMode(); + + boolean hasNoQueuedAdds = lce.getCollection().endRead(); // warning: can cause recursive calls!
(proxy initialization) + + if ( persister.getCollectionType().hasHolder( em ) ) { + getLoadContext().getPersistenceContext().addCollectionHolder( lce.getCollection() ); + } + + CollectionEntry ce = getLoadContext().getPersistenceContext().getCollectionEntry( lce.getCollection() ); + if ( ce == null ) { + ce = getLoadContext().getPersistenceContext().addInitializedCollection( persister, lce.getCollection(), lce.getKey() ); + } + else { + ce.postInitialize( lce.getCollection() ); + } + + boolean addToCache = hasNoQueuedAdds && // there were no queued additions + persister.hasCache() && // and the role has a cache + session.getCacheMode().isPutEnabled() && + !ce.isDoremove(); // and this is not a forced initialization during flush + if ( addToCache ) { + addCollectionToCache( lce, persister ); + } + + if ( log.isDebugEnabled() ) { + log.debug( "collection fully initialized: " + MessageHelper.collectionInfoString(persister, lce.getKey(), session.getFactory() ) ); + } + + if ( session.getFactory().getStatistics().isStatisticsEnabled() ) { + session.getFactory().getStatisticsImplementor().loadCollection( persister.getRole() ); + } + } + + /** + * Add the collection to the second-level cache + * + * @param lce The entry representing the collection to add + * @param persister The persister + */ + private void addCollectionToCache(LoadingCollectionEntry lce, CollectionPersister persister) { + final SessionImplementor session = getLoadContext().getPersistenceContext().getSession(); + final SessionFactoryImplementor factory = session.getFactory(); + + if ( log.isDebugEnabled() ) { + log.debug( "Caching collection: " + MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) ); + } + + if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) { + // some filters affecting the collection are enabled on the session, so do not do the put into the cache. + log.debug( "Refusing to add to cache due to enabled filters" ); + // todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered; + // but CacheKey is currently used for both collections and entities; would ideally need to define two separate ones; + // currently this works in conjunction with the check on + // DefaultInitializeCollectionEventHandler.initializeCollectionFromCache() (which makes sure to not read from + // cache with enabled filters). + return; // EARLY EXIT!!!!!
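For the put that follows, the boolean result feeds the statistics counter, and the final argument enables "minimal puts". A toy model of what that flag means (hypothetical Region class, not the Hibernate cache SPI):

```java
import java.util.HashMap;
import java.util.Map;

public class MinimalPutsSketch {
    // toy cache region for illustration only
    static final class Region {
        private final Map data = new HashMap();
        boolean put(Object key, Object value, boolean minimalPuts) {
            if ( minimalPuts && data.containsKey( key ) ) {
                return false; // already cached; skip the redundant (possibly replicated) write
            }
            data.put( key, value );
            return true;
        }
    }

    public static void main(String[] args) {
        Region region = new Region();
        System.out.println( region.put( "Order.items#1", "state", true ) );  // true  -> counts as a cache put
        System.out.println( region.put( "Order.items#1", "state", true ) );  // false -> skipped, no stats hit
        System.out.println( region.put( "Order.items#1", "state", false ) ); // true  -> forced, e.g. CacheMode.REFRESH
    }
}
```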
+ } + + final Comparator versionComparator; + final Object version; + if ( persister.isVersioned() ) { + versionComparator = persister.getOwnerEntityPersister().getVersionType().getComparator(); + final Object collectionOwner = getLoadContext().getPersistenceContext().getCollectionOwner( lce.getKey(), persister ); + version = getLoadContext().getPersistenceContext().getEntry( collectionOwner ).getVersion(); + } + else { + version = null; + versionComparator = null; + } + + CollectionCacheEntry entry = new CollectionCacheEntry( lce.getCollection(), persister ); + CacheKey cacheKey = new CacheKey( + lce.getKey(), + persister.getKeyType(), + persister.getRole(), + session.getEntityMode(), + session.getFactory() + ); + boolean put = persister.getCache().put( + cacheKey, + persister.getCacheEntryStructure().structure(entry), + session.getTimestamp(), + version, + versionComparator, + factory.getSettings().isMinimalPutsEnabled() && session.getCacheMode()!= CacheMode.REFRESH + ); + + if ( put && factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().secondLevelCachePut( persister.getCache().getRegionName() ); + } + } + + void cleanup() { + if ( !localLoadingCollectionKeys.isEmpty() ) { + log.warn( "On CollectionLoadContext#cleanup, localLoadingCollectionKeys contained [" + localLoadingCollectionKeys.size() + "] entries" ); + } + loadContexts.cleanupCollectionXRefs( localLoadingCollectionKeys ); + localLoadingCollectionKeys.clear(); + } + + + public String toString() { + return super.toString() + ""; + } +} diff --git a/src/org/hibernate/engine/loading/EntityLoadContext.java b/src/org/hibernate/engine/loading/EntityLoadContext.java new file mode 100644 index 0000000000..bcf0debf60 --- /dev/null +++ b/src/org/hibernate/engine/loading/EntityLoadContext.java @@ -0,0 +1,39 @@ +package org.hibernate.engine.loading; + +import java.sql.ResultSet; +import java.util.List; +import java.util.ArrayList; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * {@inheritDoc} + * + * @author Steve Ebersole + */ +public class EntityLoadContext { + private static final Log log = LogFactory.getLog( EntityLoadContext.class ); + + private final LoadContexts loadContexts; + private final ResultSet resultSet; + private final List hydratingEntities = new ArrayList( 20 ); // todo : need map? the prob is a proper key, right? 
+ + public EntityLoadContext(LoadContexts loadContexts, ResultSet resultSet) { + this.loadContexts = loadContexts; + this.resultSet = resultSet; + } + + void cleanup() { + if ( !hydratingEntities.isEmpty() ) { + log.warn( "On EntityLoadContext#cleanup, hydratingEntities contained [" + hydratingEntities.size() + "] entries" ); + } + hydratingEntities.clear(); + } + + + public String toString() { + return super.toString() + ""; + } + +} diff --git a/src/org/hibernate/engine/loading/LoadContexts.java b/src/org/hibernate/engine/loading/LoadContexts.java new file mode 100644 index 0000000000..c78c6440e7 --- /dev/null +++ b/src/org/hibernate/engine/loading/LoadContexts.java @@ -0,0 +1,299 @@ +package org.hibernate.engine.loading; + +import java.sql.ResultSet; +import java.util.Map; +import java.util.Set; +import java.util.Iterator; +import java.util.HashMap; +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.util.IdentityMap; +import org.hibernate.engine.PersistenceContext; +import org.hibernate.engine.CollectionKey; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.EntityMode; + +/** + * Maps {@link ResultSet result-sets} to specific contextual data + * related to processing those {@link ResultSet result-sets}. + *
<p/>
+ * Implementation note: internally an {@link IdentityMap} is used to maintain + * the mappings; {@link IdentityMap} was chosen because I'd rather not be + * dependent upon potentially bad {@link ResultSet#equals} and {@link ResultSet#hashCode} + * implementations. + *
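The standard library's IdentityHashMap shows the behavior being described: with identity semantics, even a pathologically bad equals()/hashCode() on the key cannot corrupt the mapping (the hypothetical BadlyBehavedKey stands in for a driver's ResultSet):

```java
import java.util.IdentityHashMap;
import java.util.Map;

public class IdentityKeySketch {
    // stands in for a JDBC driver's ResultSet with broken equals()/hashCode()
    static final class BadlyBehavedKey {
        public boolean equals(Object other) { return true; } // pathological
        public int hashCode() { return 42; }
    }

    public static void main(String[] args) {
        Map byIdentity = new IdentityHashMap();
        BadlyBehavedKey rs1 = new BadlyBehavedKey();
        BadlyBehavedKey rs2 = new BadlyBehavedKey();
        byIdentity.put( rs1, "context-1" );
        byIdentity.put( rs2, "context-2" );
        System.out.println( byIdentity.size() );     // 2 -- distinct instances stay distinct
        System.out.println( byIdentity.get( rs1 ) ); // context-1, despite equals() lying
    }
}
```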
<p/>
+ * Considering the JDBC-redesign work, would further like this contextual info + * not mapped separately, but available based on the result set being processed. + * This would also allow maintaining a single mapping as we could reliably get + * notification of the result-set closing... + * + * @author Steve Ebersole + */ +public class LoadContexts { + private static final Log log = LogFactory.getLog( LoadContexts.class ); + + private final PersistenceContext persistenceContext; + private Map collectionLoadContexts; + private Map entityLoadContexts; + + private Map xrefLoadingCollectionEntries; + + /** + * Creates and binds this to the given persistence context. + * + * @param persistenceContext The persistence context to which this + * will be bound. + */ + public LoadContexts(PersistenceContext persistenceContext) { + this.persistenceContext = persistenceContext; + } + + /** + * Retrieves the persistence context to which this is bound. + * + * @return The persistence context to which this is bound. + */ + public PersistenceContext getPersistenceContext() { + return persistenceContext; + } + + private SessionImplementor getSession() { + return getPersistenceContext().getSession(); + } + + private EntityMode getEntityMode() { + return getSession().getEntityMode(); + } + + + // cleanup code ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Release internal state associated with the given result set. + *
    + * This should be called when we are done with processing said result set, + * ideally as the result set is being closed. + * + * @param resultSet The result set for which it is ok to release + * associated resources. + */ + public void cleanup(ResultSet resultSet) { + if ( collectionLoadContexts != null ) { + CollectionLoadContext collectionLoadContext = ( CollectionLoadContext ) collectionLoadContexts.remove( resultSet ); + if ( collectionLoadContext != null ) { + // guard against result sets which were never registered with this context + collectionLoadContext.cleanup(); + } + } + if ( entityLoadContexts != null ) { + EntityLoadContext entityLoadContext = ( EntityLoadContext ) entityLoadContexts.remove( resultSet ); + if ( entityLoadContext != null ) { + entityLoadContext.cleanup(); + } + } + } + + /** + * Release internal state associated with *all* result sets. + *
    + * This is intended as a "failsafe" process to make sure we get everything + * cleaned up and released. + */ + public void cleanup() { + if ( collectionLoadContexts != null ) { + Iterator itr = collectionLoadContexts.values().iterator(); + while ( itr.hasNext() ) { + CollectionLoadContext collectionLoadContext = ( CollectionLoadContext ) itr.next(); + log.warn( "fail-safe cleanup (collections) : " + collectionLoadContext ); + collectionLoadContext.cleanup(); + } + collectionLoadContexts.clear(); + } + if ( entityLoadContexts != null ) { + Iterator itr = entityLoadContexts.values().iterator(); + while ( itr.hasNext() ) { + EntityLoadContext entityLoadContext = ( EntityLoadContext ) itr.next(); + log.warn( "fail-safe cleanup (entities) : " + entityLoadContext ); + entityLoadContext.cleanup(); + } + entityLoadContexts.clear(); + } + } + + + // Collection load contexts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Do we currently have any internal entries corresponding to loading + * collections? + * + * @return True if we currently hold state pertaining to loading collections; + * false otherwise. + */ + public boolean hasLoadingCollectionEntries() { + return ( xrefLoadingCollectionEntries != null && !xrefLoadingCollectionEntries.isEmpty() ); + } + + + /** + * Get the {@link CollectionLoadContext} associated with the given + * {@link ResultSet}, creating one if needed. + * + * @param resultSet The result set for which to retrieve the context. + * @return The processing context. + */ + public CollectionLoadContext getCollectionLoadContext(ResultSet resultSet) { + CollectionLoadContext context = null; + if ( collectionLoadContexts == null ) { + collectionLoadContexts = IdentityMap.instantiate( 8 ); + } + else { + context = ( CollectionLoadContext ) collectionLoadContexts.get( resultSet ); + } + if ( context == null ) { + if ( log.isTraceEnabled() ) { + log.trace( "constructing collection load context for result set [" + resultSet + "]" ); + } + context = new CollectionLoadContext( this, resultSet ); + collectionLoadContexts.put( resultSet, context ); + } + return context; + } + + /** + * Attempt to locate the loading collection given the owner's key. The lookup here + * occurs against all result-set contexts... + * + * @param persister The collection persister + * @param ownerKey The owner key + * @return The loading collection, or null if not found. + */ + public PersistentCollection locateLoadingCollection(CollectionPersister persister, Serializable ownerKey) { + LoadingCollectionEntry lce = locateLoadingCollectionEntry( new CollectionKey( persister, ownerKey, getEntityMode() ) ); + if ( lce != null ) { + if ( log.isTraceEnabled() ) { + log.trace( "returning loading collection:" + MessageHelper.collectionInfoString( persister, ownerKey, getSession().getFactory() ) ); + } + return lce.getCollection(); + } + else { + // todo : should really move this log statement to CollectionType, where this is used from... + if ( log.isTraceEnabled() ) { + log.trace( "creating collection wrapper:" + MessageHelper.collectionInfoString( persister, ownerKey, getSession().getFactory() ) ); + } + return null; + } + } + + + + + // loading collection xrefs ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Register a loading collection xref. + *
    + * This xref map is used because sometimes a collection is in the process of + * being loaded from one result set, but needs to be accessed from within the + * processing of another, "nested" result set. + *
    + * Implementation note: package protected, as this is meant solely for use + * by {@link CollectionLoadContext} to be able to locate collections + * being loaded by other {@link CollectionLoadContext}s/{@link ResultSet}s. + * + * @param entryKey The xref collection key + * @param entry The corresponding loading collection entry + */ + void registerLoadingCollectionXRef(CollectionKey entryKey, LoadingCollectionEntry entry) { + if ( xrefLoadingCollectionEntries == null ) { + xrefLoadingCollectionEntries = new HashMap(); + } + xrefLoadingCollectionEntries.put( entryKey, entry ); + } + + /** + * The inverse of {@link #registerLoadingCollectionXRef}. Here, we are done + * processing the given collection entry, so we remove it from the + * load context. + *
    + * The idea here is that other loading collections can now reference said + * collection directly from the {@link PersistenceContext} because it + * has completed its load cycle. + *
    + * Implementation note: package protected, as this is meant solely for use + * by {@link CollectionLoadContext} to be able to locate collections + * being loaded by other {@link CollectionLoadContext}s/{@link ResultSet}s. + * + * @param key The key of the collection we are done processing. + */ + void unregisterLoadingCollectionXRef(CollectionKey key) { + if ( !hasLoadingCollectionEntries() ) { + return; + } + xrefLoadingCollectionEntries.remove(key); + } + + /*package*/Map getLoadingCollectionXRefs() { + return xrefLoadingCollectionEntries; + } + + + /** + * Locate the LoadingCollectionEntry within *any* of the tracked + * {@link CollectionLoadContext}s. + *
    + * Implementation note: package protected, as this is meant solely for use + * by {@link CollectionLoadContext} to be able to locate collections + * being loaded by other {@link CollectionLoadContext}s/{@link ResultSet}s. + * + * @param key The collection key. + * @return The located entry; or null. + */ + LoadingCollectionEntry locateLoadingCollectionEntry(CollectionKey key) { + if ( xrefLoadingCollectionEntries == null ) { + return null; + } + if ( log.isTraceEnabled() ) { + log.trace( "attempting to locate loading collection entry [" + key + "] in any result-set context" ); + } + LoadingCollectionEntry rtn = ( LoadingCollectionEntry ) xrefLoadingCollectionEntries.get( key ); + if ( log.isTraceEnabled() ) { + if ( rtn == null ) { + log.trace( "collection [" + key + "] not located in load context" ); + } + else { + log.trace( "collection [" + key + "] located in load context" ); + } + } + return rtn; + } + + /*package*/void cleanupCollectionXRefs(Set entryKeys) { + Iterator itr = entryKeys.iterator(); + while ( itr.hasNext() ) { + final CollectionKey entryKey = ( CollectionKey ) itr.next(); + xrefLoadingCollectionEntries.remove( entryKey ); + } + } + + + // Entity load contexts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // * currently, not yet used... + + public EntityLoadContext getEntityLoadContext(ResultSet resultSet) { + EntityLoadContext context = null; + if ( entityLoadContexts == null ) { + entityLoadContexts = IdentityMap.instantiate( 8 ); + } + else { + context = ( EntityLoadContext ) entityLoadContexts.get( resultSet ); + } + if ( context == null ) { + context = new EntityLoadContext( this, resultSet ); + entityLoadContexts.put( resultSet, context ); + } + return context; + } + +} diff --git a/src/org/hibernate/engine/loading/LoadingCollectionEntry.java b/src/org/hibernate/engine/loading/LoadingCollectionEntry.java new file mode 100644 index 0000000000..ea3901769a --- /dev/null +++ b/src/org/hibernate/engine/loading/LoadingCollectionEntry.java @@ -0,0 +1,51 @@ +package org.hibernate.engine.loading; + +import java.io.Serializable; +import java.sql.ResultSet; + +import org.hibernate.collection.PersistentCollection; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; + +/** + * Represents a collection currently being loaded. + * + * @author Steve Ebersole + */ +public class LoadingCollectionEntry { + private final ResultSet resultSet; + private final CollectionPersister persister; + private final Serializable key; + private final PersistentCollection collection; + + public LoadingCollectionEntry( + ResultSet resultSet, + CollectionPersister persister, + Serializable key, + PersistentCollection collection) { + this.resultSet = resultSet; + this.persister = persister; + this.key = key; + this.collection = collection; + } + + public ResultSet getResultSet() { + return resultSet; + } + + public CollectionPersister getPersister() { + return persister; + } + + public Serializable getKey() { + return key; + } + + public PersistentCollection getCollection() { + return collection; + } + + public String toString() { + return getClass().getName() + "<rs=" + resultSet + ", coll=" + MessageHelper.collectionInfoString( persister.getRole(), key ) + ">@" + Integer.toHexString( hashCode() ); + } +} diff --git a/src/org/hibernate/engine/package.html b/src/org/hibernate/engine/package.html new file mode 100755 index 0000000000..7ea6f972d3 --- /dev/null +++ b/src/org/hibernate/engine/package.html @@ -0,0 +1,9 @@ + + +
    + This package contains classes that are "shared" by other packages, + and implementations of some key algorithms. +
    + + diff --git a/src/org/hibernate/engine/query/FilterQueryPlan.java b/src/org/hibernate/engine/query/FilterQueryPlan.java new file mode 100644 index 0000000000..74c677eff3 --- /dev/null +++ b/src/org/hibernate/engine/query/FilterQueryPlan.java @@ -0,0 +1,31 @@ +package org.hibernate.engine.query; + +import org.hibernate.engine.SessionFactoryImplementor; + +import java.io.Serializable; +import java.util.Map; + +/** + * Extends an HQLQueryPlan to maintain a reference to the collection-role name + * being filtered. + * + * @author Steve Ebersole + */ +public class FilterQueryPlan extends HQLQueryPlan implements Serializable { + + private final String collectionRole; + + public FilterQueryPlan( + String hql, + String collectionRole, + boolean shallow, + Map enabledFilters, + SessionFactoryImplementor factory) { + super( hql, collectionRole, shallow, enabledFilters, factory ); + this.collectionRole = collectionRole; + } + + public String getCollectionRole() { + return collectionRole; + } +} diff --git a/src/org/hibernate/engine/query/HQLQueryPlan.java b/src/org/hibernate/engine/query/HQLQueryPlan.java new file mode 100644 index 0000000000..7fc687a41e --- /dev/null +++ b/src/org/hibernate/engine/query/HQLQueryPlan.java @@ -0,0 +1,315 @@ +package org.hibernate.engine.query; + +import org.hibernate.hql.QuerySplitter; +import org.hibernate.hql.QueryTranslator; +import org.hibernate.hql.ParameterTranslations; +import org.hibernate.hql.FilterTranslator; +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.EmptyIterator; +import org.hibernate.util.JoinedIterator; +import org.hibernate.util.IdentitySet; +import org.hibernate.HibernateException; +import org.hibernate.ScrollableResults; +import org.hibernate.QueryException; +import org.hibernate.type.Type; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.RowSelection; +import org.hibernate.event.EventSource; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; +import java.util.HashSet; +import java.util.List; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.HashMap; + +/** + * Defines a query execution plan for an HQL query (or filter). + * + * @author Steve Ebersole + */ +public class HQLQueryPlan implements Serializable { + + // TODO : keep separate notions of QT[] here for shallow/non-shallow queries...
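To make the role of this class concrete, here is a hedged usage sketch; the HQL string is arbitrary and `factory` stands for an assumed SessionFactoryImplementor, none of which is part of the patch itself:

    Map noFilters = java.util.Collections.EMPTY_MAP;
    HQLQueryPlan plan = new HQLQueryPlan( "from Cat", false, noFilters, factory );
    String[] sql = plan.getSqlStrings();   // one SQL string per concrete query translation
    Set spaces = plan.getQuerySpaces();    // tables touched, used e.g. for cache up-to-date checks

In practice plans are obtained through the QueryPlanCache introduced later in this patch rather than constructed directly.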
+ + private static final Log log = LogFactory.getLog( HQLQueryPlan.class ); + + private final String sourceQuery; + private final QueryTranslator[] translators; + private final String[] sqlStrings; + + private final ParameterMetadata parameterMetadata; + private final ReturnMetadata returnMetadata; + private final Set querySpaces; + + private final Set enabledFilterNames; + private final boolean shallow; + + + public HQLQueryPlan(String hql, boolean shallow, Map enabledFilters, SessionFactoryImplementor factory) { + this( hql, null, shallow, enabledFilters, factory ); + } + + protected HQLQueryPlan(String hql, String collectionRole, boolean shallow, Map enabledFilters, SessionFactoryImplementor factory) { + this.sourceQuery = hql; + this.shallow = shallow; + + Set copy = new HashSet(); + copy.addAll( enabledFilters.keySet() ); + this.enabledFilterNames = java.util.Collections.unmodifiableSet( copy ); + + Set combinedQuerySpaces = new HashSet(); + String[] concreteQueryStrings = QuerySplitter.concreteQueries( hql, factory ); + final int length = concreteQueryStrings.length; + translators = new QueryTranslator[length]; + List sqlStringList = new ArrayList(); + for ( int i=0; i 1 ) { + final int returns = translators[0].getReturnTypes().length; + returnMetadata = new ReturnMetadata( translators[0].getReturnAliases(), new Type[returns] ); + } + else { + returnMetadata = new ReturnMetadata( translators[0].getReturnAliases(), translators[0].getReturnTypes() ); + } + } + } + } + + public String getSourceQuery() { + return sourceQuery; + } + + public Set getQuerySpaces() { + return querySpaces; + } + + public ParameterMetadata getParameterMetadata() { + return parameterMetadata; + } + + public ReturnMetadata getReturnMetadata() { + return returnMetadata; + } + + public Set getEnabledFilterNames() { + return enabledFilterNames; + } + + public String[] getSqlStrings() { + return sqlStrings; + } + + public Set getUtilizedFilterNames() { + // TODO : add this info to the translator and aggregate it here... + return null; + } + + public boolean isShallow() { + return shallow; + } + + public List performList( + QueryParameters queryParameters, + SessionImplementor session) throws HibernateException { + if ( log.isTraceEnabled() ) { + log.trace( "find: " + getSourceQuery() ); + queryParameters.traceParameters( session.getFactory() ); + } + boolean hasLimit = queryParameters.getRowSelection() != null && + queryParameters.getRowSelection().definesLimits(); + boolean needsLimit = hasLimit && translators.length > 1; + QueryParameters queryParametersToUse; + if ( needsLimit ) { + log.warn( "firstResult/maxResults specified on polymorphic query; applying in memory!" ); + RowSelection selection = new RowSelection(); + selection.setFetchSize( queryParameters.getRowSelection().getFetchSize() ); + selection.setTimeout( queryParameters.getRowSelection().getTimeout() ); + queryParametersToUse = queryParameters.createCopyUsing( selection ); + } + else { + queryParametersToUse = queryParameters; + } + + List combinedResults = new ArrayList(); + IdentitySet distinction = new IdentitySet(); + int includedCount = -1; + translator_loop: for ( int i = 0; i < translators.length; i++ ) { + List tmp = translators[i].list( session, queryParametersToUse ); + if ( needsLimit ) { + // NOTE : firstRow is zero-based + int first = queryParameters.getRowSelection().getFirstRow() == null + ? 0 + : queryParameters.getRowSelection().getFirstRow().intValue(); + int max = queryParameters.getRowSelection().getMaxRows() == null + ? 
-1 + : queryParameters.getRowSelection().getMaxRows().intValue(); + final int size = tmp.size(); + for ( int x = 0; x < size; x++ ) { + final Object result = tmp.get( x ); + if ( !distinction.add( result ) ) { + // result already seen; skip the duplicate + continue; + } + includedCount++; + if ( includedCount < first ) { + continue; + } + combinedResults.add( result ); + if ( max >= 0 && includedCount > max ) { + // break the outer loop !!! + break translator_loop; + } + } + } + else { + combinedResults.addAll( tmp ); + } + } + return combinedResults; + } + + public Iterator performIterate( + QueryParameters queryParameters, + EventSource session) throws HibernateException { + if ( log.isTraceEnabled() ) { + log.trace( "iterate: " + getSourceQuery() ); + queryParameters.traceParameters( session.getFactory() ); + } + if ( translators.length == 0 ) { + return EmptyIterator.INSTANCE; + } + + Iterator[] results = null; + boolean many = translators.length > 1; + if (many) { + results = new Iterator[translators.length]; + } + + Iterator result = null; + for ( int i = 0; i < translators.length; i++ ) { + result = translators[i].iterate( queryParameters, session ); + if (many) results[i] = result; + } + + return many ? new JoinedIterator(results) : result; + } + + public ScrollableResults performScroll( + QueryParameters queryParameters, + SessionImplementor session) throws HibernateException { + if ( log.isTraceEnabled() ) { + log.trace( "scroll: " + getSourceQuery() ); + queryParameters.traceParameters( session.getFactory() ); + } + if ( translators.length != 1 ) { + throw new QueryException( "implicit polymorphism not supported for scroll() queries" ); + } + if ( queryParameters.getRowSelection().definesLimits() && translators[0].containsCollectionFetches() ) { + throw new QueryException( "firstResult/maxResults not supported in conjunction with scroll() of a query containing collection fetches" ); + } + + return translators[0].scroll( queryParameters, session ); + } + + public int performExecuteUpdate(QueryParameters queryParameters, SessionImplementor session) + throws HibernateException { + if ( log.isTraceEnabled() ) { + log.trace( "executeUpdate: " + getSourceQuery() ); + queryParameters.traceParameters( session.getFactory() ); + } + if ( translators.length != 1 ) { + log.warn( "manipulation query [" + getSourceQuery() + "] resulted in [" + translators.length + "] split queries" ); + } + int result = 0; + for ( int i = 0; i < translators.length; i++ ) { + result += translators[i].executeUpdate( queryParameters, session ); + } + return result; + } + + private ParameterMetadata buildParameterMetadata(ParameterTranslations parameterTranslations, String hql) { + long start = System.currentTimeMillis(); + ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( hql ); + long end = System.currentTimeMillis(); + if ( log.isTraceEnabled() ) { + log.trace( "HQL param location recognition took " + (end - start) + " ms (" + hql + ")" ); + } + + int ordinalParamCount = parameterTranslations.getOrdinalParameterCount(); + int[] locations = ArrayHelper.toIntArray( recognizer.getOrdinalParameterLocationList() ); + if ( parameterTranslations.supportsOrdinalParameterMetadata() && locations.length != ordinalParamCount ) { + throw new HibernateException( "ordinal parameter mismatch" ); + } + ordinalParamCount = locations.length; + OrdinalParameterDescriptor[] ordinalParamDescriptors = new OrdinalParameterDescriptor[ordinalParamCount]; + for ( int i = 1; i <= ordinalParamCount; i++ ) { + ordinalParamDescriptors[ i - 1 ] = new
OrdinalParameterDescriptor( + i, + parameterTranslations.supportsOrdinalParameterMetadata() + ? parameterTranslations.getOrdinalParameterExpectedType( i ) + : null, + locations[ i - 1 ] + ); + } + + Iterator itr = recognizer.getNamedParameterDescriptionMap().entrySet().iterator(); + Map namedParamDescriptorMap = new HashMap(); + while( itr.hasNext() ) { + final Map.Entry entry = ( Map.Entry ) itr.next(); + final String name = ( String ) entry.getKey(); + final ParamLocationRecognizer.NamedParameterDescription description = + ( ParamLocationRecognizer.NamedParameterDescription ) entry.getValue(); + namedParamDescriptorMap.put( + name, + new NamedParameterDescriptor( + name, + parameterTranslations.getNamedParameterExpectedType( name ), + description.buildPositionsArray(), + description.isJpaStyle() + ) + ); + } + + return new ParameterMetadata( ordinalParamDescriptors, namedParamDescriptorMap ); + } + + public QueryTranslator[] getTranslators() { + QueryTranslator[] copy = new QueryTranslator[translators.length]; + System.arraycopy(translators, 0, copy, 0, copy.length); + return copy; + } +} diff --git a/src/org/hibernate/engine/query/NamedParameterDescriptor.java b/src/org/hibernate/engine/query/NamedParameterDescriptor.java new file mode 100644 index 0000000000..7deafb80c9 --- /dev/null +++ b/src/org/hibernate/engine/query/NamedParameterDescriptor.java @@ -0,0 +1,40 @@ +package org.hibernate.engine.query; + +import org.hibernate.type.Type; + +import java.io.Serializable; + +/** + * Descriptor regarding a named parameter. + * + * @author Steve Ebersole + */ +public class NamedParameterDescriptor implements Serializable { + private final String name; + private final Type expectedType; + private final int[] sourceLocations; + private final boolean jpaStyle; + + public NamedParameterDescriptor(String name, Type expectedType, int[] sourceLocations, boolean jpaStyle) { + this.name = name; + this.expectedType = expectedType; + this.sourceLocations = sourceLocations; + this.jpaStyle = jpaStyle; + } + + public String getName() { + return name; + } + + public Type getExpectedType() { + return expectedType; + } + + public int[] getSourceLocations() { + return sourceLocations; + } + + public boolean isJpaStyle() { + return jpaStyle; + } +} diff --git a/src/org/hibernate/engine/query/NativeSQLQueryPlan.java b/src/org/hibernate/engine/query/NativeSQLQueryPlan.java new file mode 100644 index 0000000000..51384b5329 --- /dev/null +++ b/src/org/hibernate/engine/query/NativeSQLQueryPlan.java @@ -0,0 +1,182 @@ +package org.hibernate.engine.query; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.QueryException; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.action.BulkOperationCleanupAction; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.TypedValue; +import org.hibernate.event.EventSource; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.loader.custom.sql.SQLCustomQuery; +import org.hibernate.type.Type; +import org.hibernate.util.ArrayHelper; + +/** + * Defines a query execution plan for a native-SQL query. 
+ * + * @author Steve Ebersole + */ +public class NativeSQLQueryPlan implements Serializable { + private final String sourceQuery; + + private final SQLCustomQuery customQuery; + + private static final Log log = LogFactory.getLog(NativeSQLQueryPlan.class); + + public NativeSQLQueryPlan( + NativeSQLQuerySpecification specification, + SessionFactoryImplementor factory) { + this.sourceQuery = specification.getQueryString(); + + customQuery = new SQLCustomQuery( + specification.getQueryString(), + specification.getQueryReturns(), + specification.getQuerySpaces(), + factory ); + } + + public String getSourceQuery() { + return sourceQuery; + } + + public SQLCustomQuery getCustomQuery() { + return customQuery; + } + + private int[] getNamedParameterLocs(String name) throws QueryException { + Object loc = customQuery.getNamedParameterBindPoints().get( name ); + if ( loc == null ) { + throw new QueryException( + "Named parameter does not appear in Query: " + name, + customQuery.getSQL() ); + } + if ( loc instanceof Integer ) { + return new int[] { ((Integer) loc ).intValue() }; + } + else { + return ArrayHelper.toIntArray( (List) loc ); + } + } + + /** + * Bind positional parameter values to the PreparedStatement + * (these are parameters specified by a JDBC-style ?). + */ + private int bindPositionalParameters(final PreparedStatement st, + final QueryParameters queryParameters, final int start, + final SessionImplementor session) throws SQLException, + HibernateException { + + final Object[] values = queryParameters + .getFilteredPositionalParameterValues(); + final Type[] types = queryParameters + .getFilteredPositionalParameterTypes(); + int span = 0; + for (int i = 0; i < values.length; i++) { + types[i].nullSafeSet( st, values[i], start + span, session ); + span += types[i].getColumnSpan( session.getFactory() ); + } + return span; + } + + /** + * Bind named parameters to the PreparedStatement (these are + * parameters specified by :name); the bind locations are resolved + * via the custom query's named parameter bind points.
+ */ + private int bindNamedParameters(final PreparedStatement ps, + final Map namedParams, final int start, + final SessionImplementor session) throws SQLException, + HibernateException { + + if ( namedParams != null ) { + // assumes that types are all of span 1 + Iterator iter = namedParams.entrySet().iterator(); + int result = 0; + while ( iter.hasNext() ) { + Map.Entry e = (Map.Entry) iter.next(); + String name = (String) e.getKey(); + TypedValue typedval = (TypedValue) e.getValue(); + int[] locs = getNamedParameterLocs( name ); + for (int i = 0; i < locs.length; i++) { + if ( log.isDebugEnabled() ) { + log.debug( "bindNamedParameters() " + + typedval.getValue() + " -> " + name + " [" + + (locs[i] + start ) + "]" ); + } + typedval.getType().nullSafeSet( ps, typedval.getValue(), + locs[i] + start, session ); + } + result += locs.length; + } + return result; + } + else { + return 0; + } + } + + protected void coordinateSharedCacheCleanup(SessionImplementor session) { + BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getCustomQuery().getQuerySpaces() ); + + action.init(); + + if ( session.isEventSource() ) { + ( ( EventSource ) session ).getActionQueue().addAction( action ); + } + } + + public int performExecuteUpdate(QueryParameters queryParameters, + SessionImplementor session) throws HibernateException { + + coordinateSharedCacheCleanup( session ); + + if(queryParameters.isCallable()) { + throw new IllegalArgumentException("callable not yet supported for native queries"); + } + + int result = 0; + PreparedStatement ps; + try { + queryParameters.processFilters( this.customQuery.getSQL(), + session ); + String sql = queryParameters.getFilteredSQL(); + + ps = session.getBatcher().prepareStatement( sql ); + + try { + int col = 1; + col += bindPositionalParameters( ps, queryParameters, col, + session ); + col += bindNamedParameters( ps, queryParameters + .getNamedParameters(), col, session ); + result = ps.executeUpdate(); + } + finally { + if ( ps != null ) { + session.getBatcher().closeStatement( ps ); + } + } + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( session.getFactory() + .getSQLExceptionConverter(), sqle, + "could not execute native bulk manipulation query", this.sourceQuery ); + } + + return result; + } + +} diff --git a/src/org/hibernate/engine/query/OrdinalParameterDescriptor.java b/src/org/hibernate/engine/query/OrdinalParameterDescriptor.java new file mode 100644 index 0000000000..654f34114d --- /dev/null +++ b/src/org/hibernate/engine/query/OrdinalParameterDescriptor.java @@ -0,0 +1,32 @@ +package org.hibernate.engine.query; + +import org.hibernate.type.Type; + +import java.io.Serializable; + +/** + * @author Steve Ebersole + */ +public class OrdinalParameterDescriptor implements Serializable { + private final int ordinalPosition; + private final Type expectedType; + private final int sourceLocation; + + public OrdinalParameterDescriptor(int ordinalPosition, Type expectedType, int sourceLocation) { + this.ordinalPosition = ordinalPosition; + this.expectedType = expectedType; + this.sourceLocation = sourceLocation; + } + + public int getOrdinalPosition() { + return ordinalPosition; + } + + public Type getExpectedType() { + return expectedType; + } + + public int getSourceLocation() { + return sourceLocation; + } +} diff --git a/src/org/hibernate/engine/query/ParamLocationRecognizer.java b/src/org/hibernate/engine/query/ParamLocationRecognizer.java new file mode 100644 index 0000000000..9a1208a461 --- /dev/null +++ 
b/src/org/hibernate/engine/query/ParamLocationRecognizer.java @@ -0,0 +1,108 @@ +package org.hibernate.engine.query; + +import org.hibernate.util.ArrayHelper; + +import java.util.Map; +import java.util.HashMap; +import java.util.List; +import java.util.ArrayList; + +/** + * Implements a parameter parser recognizer specifically for the purpose + * of journaling parameter locations. + * + * @author Steve Ebersole + */ +public class ParamLocationRecognizer implements ParameterParser.Recognizer { + + public static class NamedParameterDescription { + private final boolean jpaStyle; + private final List positions = new ArrayList(); + + public NamedParameterDescription(boolean jpaStyle) { + this.jpaStyle = jpaStyle; + } + + public boolean isJpaStyle() { + return jpaStyle; + } + + private void add(int position) { + positions.add( new Integer( position ) ); + } + + public int[] buildPositionsArray() { + return ArrayHelper.toIntArray( positions ); + } + } + + private Map namedParameterDescriptions = new HashMap(); + private List ordinalParameterLocationList = new ArrayList(); + + /** + * Convenience method for creating a param location recognizer and + * initiating the parse. + * + * @param query The query to be parsed for parameter locations. + * @return The generated recognizer, with journaled location info. + */ + public static ParamLocationRecognizer parseLocations(String query) { + ParamLocationRecognizer recognizer = new ParamLocationRecognizer(); + ParameterParser.parse( query, recognizer ); + return recognizer; + } + + /** + * Returns the map of named parameter locations. The map is keyed by + * parameter name; the corresponding value is a (@link NamedParameterDescription}. + * + * @return The map of named parameter locations. + */ + public Map getNamedParameterDescriptionMap() { + return namedParameterDescriptions; + } + + /** + * Returns the list of ordinal parameter locations. The list elements + * are Integers, representing the location for that given ordinal. Thus + * {@link #getOrdinalParameterLocationList()}.elementAt(n) represents the + * location for the nth parameter. + * + * @return The list of ordinal parameter locations. + */ + public List getOrdinalParameterLocationList() { + return ordinalParameterLocationList; + } + + + // Recognition code ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void ordinalParameter(int position) { + ordinalParameterLocationList.add( new Integer( position ) ); + } + + public void namedParameter(String name, int position) { + getOrBuildNamedParameterDescription( name, false ).add( position ); + } + + public void jpaPositionalParameter(String name, int position) { + getOrBuildNamedParameterDescription( name, true ).add( position ); + } + + private NamedParameterDescription getOrBuildNamedParameterDescription(String name, boolean jpa) { + NamedParameterDescription desc = ( NamedParameterDescription ) namedParameterDescriptions.get( name ); + if ( desc == null ) { + desc = new NamedParameterDescription( jpa ); + namedParameterDescriptions.put( name, desc ); + } + return desc; + } + + public void other(char character) { + // don't care... + } + + public void outParameter(int position) { + // don't care... 
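As an illustration of the journaling this recognizer performs (a sketch; the query string is arbitrary):

    ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations(
            "from Cat c where c.name = :name and c.weight > ?" );
    recognizer.getNamedParameterDescriptionMap().containsKey( "name" );   // true
    recognizer.getOrdinalParameterLocationList().size();                  // 1, for the lone ?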
+ } +} diff --git a/src/org/hibernate/engine/query/ParameterMetadata.java b/src/org/hibernate/engine/query/ParameterMetadata.java new file mode 100644 index 0000000000..6b39485ce6 --- /dev/null +++ b/src/org/hibernate/engine/query/ParameterMetadata.java @@ -0,0 +1,88 @@ +package org.hibernate.engine.query; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.hibernate.QueryParameterException; +import org.hibernate.type.Type; + +/** + * Encapsulates metadata about parameters encountered within a query. + * + * @author Steve Ebersole + */ +public class ParameterMetadata implements Serializable { + + private static final OrdinalParameterDescriptor[] EMPTY_ORDINALS = new OrdinalParameterDescriptor[0]; + + private final OrdinalParameterDescriptor[] ordinalDescriptors; + private final Map namedDescriptorMap; + + /** + * Instantiates a ParameterMetadata container. + * + * @param ordinalDescriptors + * @param namedDescriptorMap + */ + public ParameterMetadata(OrdinalParameterDescriptor[] ordinalDescriptors, Map namedDescriptorMap) { + if ( ordinalDescriptors == null ) { + this.ordinalDescriptors = EMPTY_ORDINALS; + } + else { + OrdinalParameterDescriptor[] copy = new OrdinalParameterDescriptor[ ordinalDescriptors.length ]; + System.arraycopy( ordinalDescriptors, 0, copy, 0, ordinalDescriptors.length ); + this.ordinalDescriptors = copy; + } + if ( namedDescriptorMap == null ) { + this.namedDescriptorMap = java.util.Collections.EMPTY_MAP; + } + else { + int size = ( int ) ( ( namedDescriptorMap.size() / .75 ) + 1 ); + Map copy = new HashMap( size ); + copy.putAll( namedDescriptorMap ); + this.namedDescriptorMap = java.util.Collections.unmodifiableMap( copy ); + } + } + + public int getOrdinalParameterCount() { + return ordinalDescriptors.length; + } + + public OrdinalParameterDescriptor getOrdinalParameterDescriptor(int position) { + if ( position < 1 || position > ordinalDescriptors.length ) { + throw new IndexOutOfBoundsException( "Remember that ordinal parameters are 1-based!" 
); + } + return ordinalDescriptors[position - 1]; + } + + public Type getOrdinalParameterExpectedType(int position) { + return getOrdinalParameterDescriptor( position ).getExpectedType(); + } + + public int getOrdinalParameterSourceLocation(int position) { + return getOrdinalParameterDescriptor( position ).getSourceLocation(); + } + + public Set getNamedParameterNames() { + return namedDescriptorMap.keySet(); + } + + public NamedParameterDescriptor getNamedParameterDescriptor(String name) { + NamedParameterDescriptor meta = ( NamedParameterDescriptor ) namedDescriptorMap.get( name ); + if ( meta == null ) { + throw new QueryParameterException( "could not locate named parameter [" + name + "]" ); + } + return meta; + } + + public Type getNamedParameterExpectedType(String name) { + return getNamedParameterDescriptor( name ).getExpectedType(); + } + + public int[] getNamedParameterSourceLocations(String name) { + return getNamedParameterDescriptor( name ).getSourceLocations(); + } + +} diff --git a/src/org/hibernate/engine/query/ParameterParser.java b/src/org/hibernate/engine/query/ParameterParser.java new file mode 100644 index 0000000000..4b73e01455 --- /dev/null +++ b/src/org/hibernate/engine/query/ParameterParser.java @@ -0,0 +1,108 @@ +package org.hibernate.engine.query; + +import org.hibernate.QueryException; +import org.hibernate.hql.classic.ParserHelper; +import org.hibernate.util.StringHelper; + +/** + * The single available method {@link #parse} is responsible for parsing a + * query string and recognizing tokens in relation to parameters (either + * named, JPA-style, or ordinal) and providing callbacks about such + * recognitions. + * + * @author Steve Ebersole + */ +public class ParameterParser { + + public static interface Recognizer { + public void outParameter(int position); + public void ordinalParameter(int position); + public void namedParameter(String name, int position); + public void jpaPositionalParameter(String name, int position); + public void other(char character); + } + + private ParameterParser() { + // disallow instantiation + } + + /** + * Performs the actual parsing and tokenizing of the query string making appropriate + * callbacks to the given recognizer upon recognition of the various tokens. + *
    + * Note that currently, this only knows how to deal with a single output + * parameter (for callable statements). If we later add support for + * multiple output params, this, obviously, needs to change. + * + * @param sqlString The string to be parsed/tokenized. + * @param recognizer The thing which handles recognition events. + * @throws QueryException + */ + public static void parse(String sqlString, Recognizer recognizer) throws QueryException { + boolean hasMainOutputParameter = sqlString.indexOf( "call" ) > 0 && + sqlString.indexOf( "?" ) < sqlString.indexOf( "call" ) && + sqlString.indexOf( "=" ) < sqlString.indexOf( "call" ); + boolean foundMainOutputParam = false; + + int stringLength = sqlString.length(); + boolean inQuote = false; + for ( int indx = 0; indx < stringLength; indx++ ) { + char c = sqlString.charAt( indx ); + if ( inQuote ) { + if ( '\'' == c ) { + inQuote = false; + } + recognizer.other( c ); + } + else if ( '\'' == c ) { + inQuote = true; + recognizer.other( c ); + } + else { + if ( c == ':' ) { + // named parameter + int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS, indx + 1 ); + int chopLocation = right < 0 ? sqlString.length() : right; + String param = sqlString.substring( indx + 1, chopLocation ); + if ( StringHelper.isEmpty( param ) ) { + throw new QueryException("Space is not allowed after parameter prefix ':' '" + + sqlString + "'"); + } + recognizer.namedParameter( param, indx ); + indx = chopLocation - 1; + } + else if ( c == '?' ) { + // could be either an ordinal or JPA-positional parameter + if ( indx < stringLength - 1 && Character.isDigit( sqlString.charAt( indx + 1 ) ) ) { + // a peek ahead showed this as an JPA-positional parameter + int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS, indx + 1 ); + int chopLocation = right < 0 ? sqlString.length() : right; + String param = sqlString.substring( indx + 1, chopLocation ); + // make sure this "name" is an integral + try { + new Integer( param ); + } + catch( NumberFormatException e ) { + throw new QueryException( "JPA-style positional param was not an integral ordinal" ); + } + recognizer.jpaPositionalParameter( param, indx ); + indx = chopLocation - 1; + } + else { + if ( hasMainOutputParameter && !foundMainOutputParam ) { + foundMainOutputParam = true; + recognizer.outParameter( indx ); + } + else { + recognizer.ordinalParameter( indx ); + } + } + } + else { + recognizer.other( c ); + } + } + } + } + +} diff --git a/src/org/hibernate/engine/query/QueryMetadata.java b/src/org/hibernate/engine/query/QueryMetadata.java new file mode 100644 index 0000000000..a5aec25feb --- /dev/null +++ b/src/org/hibernate/engine/query/QueryMetadata.java @@ -0,0 +1,72 @@ +package org.hibernate.engine.query; + +import org.hibernate.type.Type; + +import java.io.Serializable; +import java.util.Set; + +/** + * Defines metadata regarding a translated HQL or native-SQL query. 
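For the parse() loop just shown, a sketch of which callbacks fire for a mixed query; `recognizer` stands for any Recognizer implementation, e.g. a ParamLocationRecognizer:

    ParameterParser.parse( "from Cat where name = :name and id = ?1 and weight > ?", recognizer );
    // fires namedParameter( "name", ... ), then jpaPositionalParameter( "1", ... ),
    // then ordinalParameter( ... ); characters inside 'quoted literals' are
    // reported only through other(char)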
+ * + * @author Steve Ebersole + */ +public class QueryMetadata implements Serializable { + private final String sourceQuery; + private final ParameterMetadata parameterMetadata; + private final String[] returnAliases; + private final Type[] returnTypes; + private final Set querySpaces; + + public QueryMetadata( + String sourceQuery, + ParameterMetadata parameterMetadata, + String[] returnAliases, + Type[] returnTypes, + Set querySpaces) { + this.sourceQuery = sourceQuery; + this.parameterMetadata = parameterMetadata; + this.returnAliases = returnAliases; + this.returnTypes = returnTypes; + this.querySpaces = querySpaces; + } + + /** + * Get the source HQL or native-SQL query. + * + * @return The source query. + */ + public String getSourceQuery() { + return sourceQuery; + } + + public ParameterMetadata getParameterMetadata() { + return parameterMetadata; + } + + /** + * Return source query select clause aliases (if any) + * + * @return an array of aliases as strings. + */ + public String[] getReturnAliases() { + return returnAliases; + } + + /** + * An array of types describing the returns of the source query. + * + * @return The return type array. + */ + public Type[] getReturnTypes() { + return returnTypes; + } + + /** + * The set of query spaces affected by this source query. + * + * @return The set of query spaces. + */ + public Set getQuerySpaces() { + return querySpaces; + } +} diff --git a/src/org/hibernate/engine/query/QueryPlanCache.java b/src/org/hibernate/engine/query/QueryPlanCache.java new file mode 100644 index 0000000000..053a62bad2 --- /dev/null +++ b/src/org/hibernate/engine/query/QueryPlanCache.java @@ -0,0 +1,263 @@ +package org.hibernate.engine.query; + +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.SimpleMRUCache; +import org.hibernate.util.SoftLimitMRUCache; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.QueryException; +import org.hibernate.MappingException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.io.Serializable; +import java.util.Map; +import java.util.HashMap; +import java.util.List; +import java.util.Iterator; +import java.util.Set; +import java.util.HashSet; +import java.util.Collections; + +/** + * Acts as a cache for compiled query plans, as well as query-parameter metadata. + * + * @author Steve Ebersole + */ +public class QueryPlanCache implements Serializable { + + private static final Log log = LogFactory.getLog( QueryPlanCache.class ); + + private SessionFactoryImplementor factory; + + public QueryPlanCache(SessionFactoryImplementor factory) { + this.factory = factory; + } + + // simple cache of param metadata based on query string. Ideally, the + // original "user-supplied query" string should be used to retrieve this + // metadata (i.e., not the param-list-expanded query string) to avoid + // unnecessary cache entries. + // Used solely for caching param metadata for native-sql queries, see + // getSQLParameterMetadata() for a discussion as to why... + private final SimpleMRUCache sqlParamMetadataCache = new SimpleMRUCache(); + + // the cache of the actual plans...
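The caching contract implemented below can be sketched as follows (hedged; `factory` again stands for an assumed SessionFactoryImplementor):

    QueryPlanCache planCache = new QueryPlanCache( factory );
    HQLQueryPlan first = planCache.getHQLQueryPlan( "from Cat", false, java.util.Collections.EMPTY_MAP );
    HQLQueryPlan second = planCache.getHQLQueryPlan( "from Cat", false, java.util.Collections.EMPTY_MAP );
    // first == second : the second lookup finds the already-compiled plan
    // under the same (query, shallow, filter-names) key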
+ private final SoftLimitMRUCache planCache = new SoftLimitMRUCache( 128 ); + + + public ParameterMetadata getSQLParameterMetadata(String query) { + ParameterMetadata metadata = ( ParameterMetadata ) sqlParamMetadataCache.get( query ); + if ( metadata == null ) { + // for native-sql queries, the param metadata is determined outside + // any relation to a query plan, because query plan creation and/or + // retreival for a native-sql query depends on all of the return + // types having been set, which might not be the case up-front when + // param metadata would be most useful + metadata = buildNativeSQLParameterMetadata( query ); + sqlParamMetadataCache.put( query, metadata ); + } + return metadata; + } + + public HQLQueryPlan getHQLQueryPlan(String queryString, boolean shallow, Map enabledFilters) + throws QueryException, MappingException { + HQLQueryPlanKey key = new HQLQueryPlanKey( queryString, shallow, enabledFilters ); + HQLQueryPlan plan = ( HQLQueryPlan ) planCache.get ( key ); + + if ( plan == null ) { + if ( log.isTraceEnabled() ) { + log.trace( "unable to locate HQL query plan in cache; generating (" + queryString + ")" ); + } + plan = new HQLQueryPlan(queryString, shallow, enabledFilters, factory ); + } + else { + if ( log.isTraceEnabled() ) { + log.trace( "located HQL query plan in cache (" + queryString + ")" ); + } + } + + planCache.put( key, plan ); + + return plan; + } + + public FilterQueryPlan getFilterQueryPlan(String filterString, String collectionRole, boolean shallow, Map enabledFilters) + throws QueryException, MappingException { + FilterQueryPlanKey key = new FilterQueryPlanKey( filterString, collectionRole, shallow, enabledFilters ); + FilterQueryPlan plan = ( FilterQueryPlan ) planCache.get ( key ); + + if ( plan == null ) { + if ( log.isTraceEnabled() ) { + log.trace( "unable to locate collection-filter query plan in cache; generating (" + collectionRole + " : " + filterString + ")" ); + } + plan = new FilterQueryPlan( filterString, collectionRole, shallow, enabledFilters, factory ); + } + else { + if ( log.isTraceEnabled() ) { + log.trace( "located collection-filter query plan in cache (" + collectionRole + " : " + filterString + ")" ); + } + } + + planCache.put( key, plan ); + + return plan; + } + + public NativeSQLQueryPlan getNativeSQLQueryPlan(NativeSQLQuerySpecification spec) { + NativeSQLQueryPlan plan = ( NativeSQLQueryPlan ) planCache.get( spec ); + + if ( plan == null ) { + if ( log.isTraceEnabled() ) { + log.trace( "unable to locate native-sql query plan in cache; generating (" + spec.getQueryString() + ")" ); + } + plan = new NativeSQLQueryPlan( spec, factory ); + } + else { + if ( log.isTraceEnabled() ) { + log.trace( "located native-sql query plan in cache (" + spec.getQueryString() + ")" ); + } + } + + planCache.put( spec, plan ); + return plan; + } + + private ParameterMetadata buildNativeSQLParameterMetadata(String sqlString) { + ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( sqlString ); + + OrdinalParameterDescriptor[] ordinalDescriptors = + new OrdinalParameterDescriptor[ recognizer.getOrdinalParameterLocationList().size() ]; + for ( int i = 0; i < recognizer.getOrdinalParameterLocationList().size(); i++ ) { + final Integer position = ( Integer ) recognizer.getOrdinalParameterLocationList().get( i ); + ordinalDescriptors[i] = new OrdinalParameterDescriptor( i, null, position.intValue() ); + } + + Iterator itr = recognizer.getNamedParameterDescriptionMap().entrySet().iterator(); + Map namedParamDescriptorMap = new 
HashMap(); + while( itr.hasNext() ) { + final Map.Entry entry = ( Map.Entry ) itr.next(); + final String name = ( String ) entry.getKey(); + final ParamLocationRecognizer.NamedParameterDescription description = + ( ParamLocationRecognizer.NamedParameterDescription ) entry.getValue(); + namedParamDescriptorMap.put( + name , + new NamedParameterDescriptor( name, null, description.buildPositionsArray(), description.isJpaStyle() ) + ); + } + + return new ParameterMetadata( ordinalDescriptors, namedParamDescriptorMap ); + } + + private static class HQLQueryPlanKey implements Serializable { + private final String query; + private final boolean shallow; + private final Set filterNames; + private final int hashCode; + + public HQLQueryPlanKey(String query, boolean shallow, Map enabledFilters) { + this.query = query; + this.shallow = shallow; + + if ( enabledFilters == null || enabledFilters.isEmpty() ) { + filterNames = Collections.EMPTY_SET; + } + else { + Set tmp = new HashSet(); + tmp.addAll( enabledFilters.keySet() ); + this.filterNames = Collections.unmodifiableSet( tmp ); + } + + int hash = query.hashCode(); + hash = 29 * hash + ( shallow ? 1 : 0 ); + hash = 29 * hash + filterNames.hashCode(); + this.hashCode = hash; + } + + public boolean equals(Object o) { + if ( this == o ) { + return true; + } + if ( o == null || getClass() != o.getClass() ) { + return false; + } + + final HQLQueryPlanKey that = ( HQLQueryPlanKey ) o; + + if ( shallow != that.shallow ) { + return false; + } + if ( !filterNames.equals( that.filterNames ) ) { + return false; + } + if ( !query.equals( that.query ) ) { + return false; + } + + return true; + } + + public int hashCode() { + return hashCode; + } + } + + private static class FilterQueryPlanKey implements Serializable { + private final String query; + private final String collectionRole; + private final boolean shallow; + private final Set filterNames; + private final int hashCode; + + public FilterQueryPlanKey(String query, String collectionRole, boolean shallow, Map enabledFilters) { + this.query = query; + this.collectionRole = collectionRole; + this.shallow = shallow; + + if ( enabledFilters == null || enabledFilters.isEmpty() ) { + filterNames = Collections.EMPTY_SET; + } + else { + Set tmp = new HashSet(); + tmp.addAll( enabledFilters.keySet() ); + this.filterNames = Collections.unmodifiableSet( tmp ); + } + + int hash = query.hashCode(); + hash = 29 * hash + collectionRole.hashCode(); + hash = 29 * hash + ( shallow ? 
1 : 0 ); + hash = 29 * hash + filterNames.hashCode(); + this.hashCode = hash; + } + + public boolean equals(Object o) { + if ( this == o ) { + return true; + } + if ( o == null || getClass() != o.getClass() ) { + return false; + } + + final FilterQueryPlanKey that = ( FilterQueryPlanKey ) o; + + if ( shallow != that.shallow ) { + return false; + } + if ( !filterNames.equals( that.filterNames ) ) { + return false; + } + if ( !query.equals( that.query ) ) { + return false; + } + if ( !collectionRole.equals( that.collectionRole ) ) { + return false; + } + + return true; + } + + public int hashCode() { + return hashCode; + } + } +} diff --git a/src/org/hibernate/engine/query/ReturnMetadata.java b/src/org/hibernate/engine/query/ReturnMetadata.java new file mode 100644 index 0000000000..3d93714636 --- /dev/null +++ b/src/org/hibernate/engine/query/ReturnMetadata.java @@ -0,0 +1,26 @@ +package org.hibernate.engine.query; + +import org.hibernate.type.Type; + +import java.io.Serializable; + +/** + * @author Steve Ebersole + */ +public class ReturnMetadata implements Serializable { + private final String[] returnAliases; + private final Type[] returnTypes; + + public ReturnMetadata(String[] returnAliases, Type[] returnTypes) { + this.returnAliases = returnAliases; + this.returnTypes = returnTypes; + } + + public String[] getReturnAliases() { + return returnAliases; + } + + public Type[] getReturnTypes() { + return returnTypes; + } +} diff --git a/src/org/hibernate/engine/query/sql/NativeSQLQueryCollectionReturn.java b/src/org/hibernate/engine/query/sql/NativeSQLQueryCollectionReturn.java new file mode 100644 index 0000000000..ec6be8cc4f --- /dev/null +++ b/src/org/hibernate/engine/query/sql/NativeSQLQueryCollectionReturn.java @@ -0,0 +1,60 @@ +// $Id: NativeSQLQueryCollectionReturn.java 7232 2005-06-19 17:16:40 -0500 (Sun, 19 Jun 2005) maxcsaucdk $ +package org.hibernate.engine.query.sql; + +import java.util.Map; + +import org.hibernate.LockMode; + +/** + * Represents a return defined as part of a native sql query which + * names a collection role in the form {classname}.{collectionrole}; it + * is used in defining a custom sql query for loading an entity's + * collection in non-fetching scenarios (i.e., loading the collection + * itself as the "root" of the result). + * + * @author Steve Ebersole + */ +public class NativeSQLQueryCollectionReturn extends NativeSQLQueryNonScalarReturn { + private String ownerEntityName; + private String ownerProperty; + + /** + * Construct a native-sql return representing a collection initializer + * + * @param alias The result alias + * @param ownerEntityName The entity-name of the entity owning the collection + * to be initialized. + * @param ownerProperty The property name (on the owner) which represents + * the collection to be initialized. + * @param propertyResults Any user-supplied column->property mappings + * @param lockMode The lock mode to apply to the collection. + */ + public NativeSQLQueryCollectionReturn( + String alias, + String ownerEntityName, + String ownerProperty, + Map propertyResults, + LockMode lockMode) { + super( alias, propertyResults, lockMode ); + this.ownerEntityName = ownerEntityName; + this.ownerProperty = ownerProperty; + } + + /** + * Returns the class owning the collection. + * + * @return The class owning the collection. + */ + public String getOwnerEntityName() { + return ownerEntityName; + } + + /** + * Returns the name of the property representing the collection from the {@link #getOwnerEntityName}. 
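For instance, a query that loads an owner's collection as the root of the result might be described like so (a sketch; the entity name and aliases are hypothetical):

    NativeSQLQueryCollectionReturn ret = new NativeSQLQueryCollectionReturn(
            "kittens",           // result alias
            "org.example.Cat",   // entity-name of the collection owner
            "kittens",           // owner property holding the collection
            null,                // no custom column->property mappings
            LockMode.NONE );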
+ * + * @return The name of the property representing the collection on the owner class. + */ + public String getOwnerProperty() { + return ownerProperty; + } +} diff --git a/src/org/hibernate/engine/query/sql/NativeSQLQueryJoinReturn.java b/src/org/hibernate/engine/query/sql/NativeSQLQueryJoinReturn.java new file mode 100644 index 0000000000..f2b1b7ef28 --- /dev/null +++ b/src/org/hibernate/engine/query/sql/NativeSQLQueryJoinReturn.java @@ -0,0 +1,56 @@ +// $Id: NativeSQLQueryJoinReturn.java 7232 2005-06-19 17:16:40 -0500 (Sun, 19 Jun 2005) maxcsaucdk $ +package org.hibernate.engine.query.sql; + +import java.util.Map; + +import org.hibernate.LockMode; + +/** + * Represents a return defined as part of a native sql query which + * names a fetched role. + * + * @author Steve Ebersole + */ +public class NativeSQLQueryJoinReturn extends NativeSQLQueryNonScalarReturn { + private String ownerAlias; + private String ownerProperty; + + /** + * Construct a return descriptor representing some form of fetch. + * + * @param alias The result alias + * @param ownerAlias The owner's result alias + * @param ownerProperty The owner's property representing the thing to be fetched + * @param propertyResults Any user-supplied column->property mappings + * @param lockMode The lock mode to apply + */ + public NativeSQLQueryJoinReturn( + String alias, + String ownerAlias, + String ownerProperty, + Map propertyResults, + LockMode lockMode) { + super( alias, propertyResults, lockMode ); + this.ownerAlias = ownerAlias; + this.ownerProperty = ownerProperty; + } + + /** + * Retrieve the alias of the owner of this fetched association. + * + * @return The owner's alias. + */ + public String getOwnerAlias() { + return ownerAlias; + } + + /** + * Retrieve the property name (relative to the owner) which maps to + * the association to be fetched. + * + * @return The property name. + */ + public String getOwnerProperty() { + return ownerProperty; + } +} diff --git a/src/org/hibernate/engine/query/sql/NativeSQLQueryNonScalarReturn.java b/src/org/hibernate/engine/query/sql/NativeSQLQueryNonScalarReturn.java new file mode 100644 index 0000000000..0692a38d81 --- /dev/null +++ b/src/org/hibernate/engine/query/sql/NativeSQLQueryNonScalarReturn.java @@ -0,0 +1,67 @@ +// $Id: NativeSQLQueryNonScalarReturn.java 7232 2005-06-19 17:16:40 -0500 (Sun, 19 Jun 2005) maxcsaucdk $ +package org.hibernate.engine.query.sql; + +import java.io.Serializable; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.hibernate.HibernateException; +import org.hibernate.LockMode; + +/** + * Represents the base information for a non-scalar return defined as part of + * a native sql query. + * + * @author Steve Ebersole + */ +public abstract class NativeSQLQueryNonScalarReturn implements NativeSQLQueryReturn, Serializable { + private final String alias; + private final LockMode lockMode; + private final Map propertyResults = new HashMap(); + + /** + * Constructs some form of non-scalar return descriptor + * + * @param alias The result alias + * @param propertyResults Any user-supplied column->property mappings + * @param lockMode The lock mode to apply to the return. 
+ */ + protected NativeSQLQueryNonScalarReturn(String alias, Map propertyResults, LockMode lockMode) { + this.alias = alias; + if ( alias == null ) { + throw new HibernateException("alias must be specified"); + } + this.lockMode = lockMode; + if ( propertyResults != null ) { + this.propertyResults.putAll( propertyResults ); + } + } + + /** + * Retrieve the defined result alias + * + * @return The result alias. + */ + public String getAlias() { + return alias; + } + + /** + * Retrieve the lock-mode to apply to this return + * + * @return The lock mode + */ + public LockMode getLockMode() { + return lockMode; + } + + /** + * Retrieve the user-supplied column->property mappings. + * + * @return The property mappings. + */ + public Map getPropertyResultsMap() { + return Collections.unmodifiableMap( propertyResults ); + } +} diff --git a/src/org/hibernate/engine/query/sql/NativeSQLQueryReturn.java b/src/org/hibernate/engine/query/sql/NativeSQLQueryReturn.java new file mode 100644 index 0000000000..f090d97030 --- /dev/null +++ b/src/org/hibernate/engine/query/sql/NativeSQLQueryReturn.java @@ -0,0 +1,9 @@ +package org.hibernate.engine.query.sql; + +/** + * Describes a return in a native SQL query. + * + * @author Steve Ebersole + */ +public interface NativeSQLQueryReturn { +} diff --git a/src/org/hibernate/engine/query/sql/NativeSQLQueryRootReturn.java b/src/org/hibernate/engine/query/sql/NativeSQLQueryRootReturn.java new file mode 100644 index 0000000000..e88d43e0e5 --- /dev/null +++ b/src/org/hibernate/engine/query/sql/NativeSQLQueryRootReturn.java @@ -0,0 +1,52 @@ +// $Id: NativeSQLQueryRootReturn.java 7232 2005-06-19 17:16:40 -0500 (Sun, 19 Jun 2005) maxcsaucdk $ +package org.hibernate.engine.query.sql; + +import java.util.Map; + +import org.hibernate.LockMode; + +/** + * Represents a return defined as part of a native sql query which + * names a "root" entity. A root entity means it is explicitly a + * "column" in the result, as opposed to a fetched relationship or role. + * + * @author Steve Ebersole + */ +public class NativeSQLQueryRootReturn extends NativeSQLQueryNonScalarReturn { + private String returnEntityName; + + /** + * Construct a return representing an entity returned at the root + * of the result. + * + * @param alias The result alias + * @param entityName The entity name. + * @param lockMode The lock mode to apply + */ + public NativeSQLQueryRootReturn(String alias, String entityName, LockMode lockMode) { + this(alias, entityName, null, lockMode); + } + + /** + * + * @param alias The result alias + * @param entityName The entity name. + * @param propertyResults Any user-supplied column->property mappings + * @param lockMode The lock mode to apply + */ + public NativeSQLQueryRootReturn(String alias, String entityName, Map propertyResults, LockMode lockMode) { + super( alias, propertyResults, lockMode ); + this.returnEntityName = entityName; + + } + + /** + * The name of the entity to be returned. + * + * @return The entity name + */ + public String getReturnEntityName() { + return returnEntityName; + } + +} diff --git a/src/org/hibernate/engine/query/sql/NativeSQLQueryScalarReturn.java b/src/org/hibernate/engine/query/sql/NativeSQLQueryScalarReturn.java new file mode 100644 index 0000000000..351f91d0da --- /dev/null +++ b/src/org/hibernate/engine/query/sql/NativeSQLQueryScalarReturn.java @@ -0,0 +1,27 @@ +package org.hibernate.engine.query.sql; + +import org.hibernate.type.Type; + +/** + * Describes a scalar return in a native SQL query. 
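Combining the return types defined here, the shape of a native query result might be declared as follows (a sketch; the entity name and aliases are hypothetical, and Hibernate.STRING is the built-in string type constant):

    NativeSQLQueryReturn[] returns = new NativeSQLQueryReturn[] {
            new NativeSQLQueryRootReturn( "cat", "org.example.Cat", LockMode.READ ),  // a root entity "column"
            new NativeSQLQueryScalarReturn( "nickname", Hibernate.STRING )            // a plain scalar column
    };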
+ * + * @author gloegl + */ +public class NativeSQLQueryScalarReturn implements NativeSQLQueryReturn { + private Type type; + private String columnAlias; + + public NativeSQLQueryScalarReturn(String alias, Type type) { + this.type = type; + this.columnAlias = alias; + } + + public String getColumnAlias() { + return columnAlias; + } + + public Type getType() { + return type; + } + +} diff --git a/src/org/hibernate/engine/query/sql/NativeSQLQuerySpecification.java b/src/org/hibernate/engine/query/sql/NativeSQLQuerySpecification.java new file mode 100644 index 0000000000..e654a06b94 --- /dev/null +++ b/src/org/hibernate/engine/query/sql/NativeSQLQuerySpecification.java @@ -0,0 +1,80 @@ +package org.hibernate.engine.query.sql; + +import org.hibernate.util.ArrayHelper; + +import java.util.Set; +import java.util.Collection; +import java.util.HashSet; +import java.util.Arrays; +import java.util.Collections; + +/** + * Defines the specification or blue-print for a native-sql query. + * Essentially a simple struct containing the information needed to "translate" + * a native-sql query and cache that translated representation. Also used as + * the key by which the native-sql query plans are cached. + * + * @author Steve Ebersole + */ +public class NativeSQLQuerySpecification { + private final String queryString; + private final NativeSQLQueryReturn[] queryReturns; + private final Set querySpaces; + private final int hashCode; + + public NativeSQLQuerySpecification( + String queryString, + NativeSQLQueryReturn[] queryReturns, + Collection querySpaces) { + this.queryString = queryString; + this.queryReturns = queryReturns; + if ( querySpaces == null ) { + this.querySpaces = Collections.EMPTY_SET; + } + else { + Set tmp = new HashSet(); + tmp.addAll( querySpaces ); + this.querySpaces = Collections.unmodifiableSet( tmp ); + } + + // pre-determine and cache the hashcode + int hashCode = queryString.hashCode(); + hashCode = 29 * hashCode + this.querySpaces.hashCode(); + if ( this.queryReturns != null ) { + hashCode = 29 * hashCode + ArrayHelper.toList( this.queryReturns ).hashCode(); + } + this.hashCode = hashCode; + } + + public String getQueryString() { + return queryString; + } + + public NativeSQLQueryReturn[] getQueryReturns() { + return queryReturns; + } + + public Set getQuerySpaces() { + return querySpaces; + } + + public boolean equals(Object o) { + if ( this == o ) { + return true; + } + if ( o == null || getClass() != o.getClass() ) { + return false; + } + + final NativeSQLQuerySpecification that = ( NativeSQLQuerySpecification ) o; + + return querySpaces.equals( that.querySpaces ) && + queryString.equals( that.queryString ) && + Arrays.equals( queryReturns, that.queryReturns ); + } + + + public int hashCode() { + return hashCode; + } +} diff --git a/src/org/hibernate/engine/transaction/IsolatedWork.java b/src/org/hibernate/engine/transaction/IsolatedWork.java new file mode 100644 index 0000000000..48c70115ba --- /dev/null +++ b/src/org/hibernate/engine/transaction/IsolatedWork.java @@ -0,0 +1,22 @@ +package org.hibernate.engine.transaction; + +import org.hibernate.HibernateException; + +import java.sql.Connection; + +/** + * Represents work that needs to be performed in a manner + * which isolates it from any current application unit of + * work transaction. + * + * @author Steve Ebersole + */ +public interface IsolatedWork { + /** + * Perform the actual work to be done. + * + * @param connection The JDBC connection to use. 
+ * @throws HibernateException + */ + public void doWork(Connection connection) throws HibernateException; +} diff --git a/src/org/hibernate/engine/transaction/Isolater.java b/src/org/hibernate/engine/transaction/Isolater.java new file mode 100644 index 0000000000..a9e80fd05d --- /dev/null +++ b/src/org/hibernate/engine/transaction/Isolater.java @@ -0,0 +1,230 @@ +package org.hibernate.engine.transaction; + +import java.sql.Connection; +import java.sql.SQLException; +import javax.transaction.Transaction; +import javax.transaction.TransactionManager; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; + +/** + * Class which provides the isolation semantics required by + * an {@link IsolatedWork}. Processing comes in two flavors:

<ul> + * <li>{@link #doIsolatedWork} : makes sure the work to be done is + * performed in a separate, distinct transaction</li> + * <li>{@link #doNonTransactedWork} : makes sure the work to be + * done is performed outside the scope of any transaction</li> + * </ul>
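Given these two entry points, a typical caller looks roughly like the sketch below (table name hypothetical; java.sql imports and a SessionImplementor named session are assumed to be in scope). This is the pattern that table-based identifier generators rely on:

    IsolatedWork work = new IsolatedWork() {
        public void doWork(Connection connection) throws HibernateException {
            try {
                // executes in its own transaction, invisible to any suspended one
                PreparedStatement st = connection.prepareStatement(
                        "update hibernate_unique_key set next_hi = next_hi + 1" );
                try {
                    st.executeUpdate();
                }
                finally {
                    st.close();
                }
            }
            catch ( SQLException sqle ) {
                throw new HibernateException( "unable to update hi value", sqle );
            }
        }
    };
    Isolater.doIsolatedWork( work, session );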
+ * + * @author Steve Ebersole + */ +public class Isolater { + + private static final Log log = LogFactory.getLog( Isolater.class ); + + /** + * Ensures that all processing actually performed by the given work will + * occur in a separate transaction. + * + * @param work The work to be performed. + * @param session The session from which this request is originating. + * @throws HibernateException + */ + public static void doIsolatedWork(IsolatedWork work, SessionImplementor session) throws HibernateException { + boolean isJta = session.getFactory().getTransactionManager() != null; + if ( isJta ) { + new JtaDelegate( session ).delegateWork( work, true ); + } + else { + new JdbcDelegate( session ).delegateWork( work, true ); + } + } + + /** + * Ensures that all processing actually performed by the given work will + * occur outside of a transaction. + * + * @param work The work to be performed. + * @param session The session from which this request is originating. + * @throws HibernateException + */ + public static void doNonTransactedWork(IsolatedWork work, SessionImplementor session) throws HibernateException { + boolean isJta = session.getFactory().getTransactionManager() != null; + if ( isJta ) { + new JtaDelegate( session ).delegateWork( work, false ); + } + else { + new JdbcDelegate( session ).delegateWork( work, false ); + } + } + + // should be ok performance-wise to generate new delegate instances for each + // request since these are locally stack-scoped. Besides, it makes the code + // much easier to read than the old TransactionHelper stuff... + + private static interface Delegate { + public void delegateWork(IsolatedWork work, boolean transacted) throws HibernateException; + } + + /** + * An isolation delegate for JTA-based transactions. Essentially suspends + * any current transaction, does the work in a new transaction, and then + * resumes the initial transaction (if there was one). + */ + public static class JtaDelegate implements Delegate { + private final SessionImplementor session; + + public JtaDelegate(SessionImplementor session) { + this.session = session; + } + + public void delegateWork(IsolatedWork work, boolean transacted) throws HibernateException { + TransactionManager transactionManager = session.getFactory().getTransactionManager(); + Transaction surroundingTransaction = null; + Connection connection = null; + boolean caughtException = false; + + try { + // First we need to suspend any current JTA transaction and obtain + // a JDBC connection + surroundingTransaction = transactionManager.suspend(); + if ( log.isDebugEnabled() ) { + log.debug( "surrounding JTA transaction suspended [" + surroundingTransaction + "]" ); + } + + if ( transacted ) { + transactionManager.begin(); + } + + connection = session.getBatcher().openConnection(); + + // perform the actual work + work.doWork( connection ); + + // if everything went ok, commit the transaction and close the obtained + // connection handle...
+ session.getBatcher().closeConnection( connection ); + + if ( transacted ) { + transactionManager.commit(); + } + } + catch( Throwable t ) { + // at some point the processing went bad, so we need to: + // 1) make sure the connection handle gets released + // 2) try to cleanup the JTA context as much as possible + caughtException = true; + try { + if ( connection != null && !connection.isClosed() ) { + session.getBatcher().closeConnection( connection ); + } + } + catch( Throwable ignore ) { + log.trace( "unable to release connection on exception [" + ignore + "]" ); + } + if ( transacted ) { + try { + transactionManager.rollback(); + } + catch( Throwable ignore ) { + log.trace( "unable to rollback new transaction on exception [" + ignore + "]" ); + } + } + // finally handle the exception + if ( t instanceof HibernateException ) { + throw ( HibernateException ) t; + } + else { + throw new HibernateException( "error performing isolated work", t ); + } + } + finally { + if ( surroundingTransaction != null ) { + try { + transactionManager.resume( surroundingTransaction ); + if ( log.isDebugEnabled() ) { + log.debug( "surrounding JTA transaction resumed [" + surroundingTransaction + "]" ); + } + } + catch( Throwable t ) { + if ( !caughtException ) { + throw new HibernateException( "unable to resume previously suspended transaction", t ); + } + } + } + } + } + } + + /** + * An isolation delegate for JDBC-based transactions. Basically just + * grabs a new connection and does the work on that. + */ + public static class JdbcDelegate implements Delegate { + private final SessionImplementor session; + + public JdbcDelegate(SessionImplementor session) { + this.session = session; + } + + public void delegateWork(IsolatedWork work, boolean transacted) throws HibernateException { + Connection connection = null; + boolean wasAutoCommit = false; + try { + connection = session.getBatcher().openConnection(); + + if ( transacted ) { + if ( connection.getAutoCommit() ) { + wasAutoCommit = true; + connection.setAutoCommit( false ); + } + } + + work.doWork( connection ); + + if ( transacted ) { + connection.commit(); + } + } + catch( Throwable t ) { + try { + if ( transacted && connection != null && !connection.isClosed() ) { + connection.rollback(); + } + } + catch( Throwable ignore ) { + log.trace( "unable to release connection on exception [" + ignore + "]" ); + } + + if ( t instanceof HibernateException ) { + throw ( HibernateException ) t; + } + else if ( t instanceof SQLException ) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + ( SQLException ) t, + "error performing isolated work" + ); + } + else { + throw new HibernateException( "error performing isolated work", t ); + } + } + finally { + if ( transacted && wasAutoCommit ) { + try { + connection.setAutoCommit( true ); + } + catch( Throwable ignore ) { + log.trace( "was unable to reset connection back to auto-commit" ); + } + } + session.getBatcher().closeConnection( connection ); + } + } + } +} diff --git a/src/org/hibernate/event/AbstractEvent.java b/src/org/hibernate/event/AbstractEvent.java new file mode 100644 index 0000000000..8f1e8e3aad --- /dev/null +++ b/src/org/hibernate/event/AbstractEvent.java @@ -0,0 +1,35 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + + +/** + * Defines a base class for Session generated events. 
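Since every concrete event inherits getSession() from this base class, a listener can always reach back into the originating session. A minimal sketch (class name hypothetical, delegating to the shipped default listener):

    public class VerboseFlushEventListener extends DefaultFlushEventListener {
        public void onFlush(FlushEvent event) throws HibernateException {
            // getSession() is inherited from AbstractEvent via FlushEvent
            System.out.println( "flushing session: " + event.getSession() );
            super.onFlush( event );
        }
    }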
+ * + * @author Steve Ebersole + */ +public abstract class AbstractEvent implements Serializable { + + private final EventSource session; + + /** + * Constructs an event from the given event session. + * + * @param source The session event source. + */ + public AbstractEvent(EventSource source) { + this.session = source; + } + + /** + * Returns the session event source for this event. This is the underlying + * session from which this event was generated. + * + * @return The session event source. + */ + public final EventSource getSession() { + return session; + } + +} diff --git a/src/org/hibernate/event/AutoFlushEvent.java b/src/org/hibernate/event/AutoFlushEvent.java new file mode 100644 index 0000000000..012a656690 --- /dev/null +++ b/src/org/hibernate/event/AutoFlushEvent.java @@ -0,0 +1,36 @@ +///$Id$ +package org.hibernate.event; + +import java.util.Set; + + +/** Defines an event class for the auto-flushing of a session. + * + * @author Steve Ebersole + */ +public class AutoFlushEvent extends FlushEvent { + + private Set querySpaces; + private boolean flushRequired; + + public AutoFlushEvent(Set querySpaces, EventSource source) { + super(source); + this.querySpaces = querySpaces; + } + + public Set getQuerySpaces() { + return querySpaces; + } + + public void setQuerySpaces(Set querySpaces) { + this.querySpaces = querySpaces; + } + + public boolean isFlushRequired() { + return flushRequired; + } + + public void setFlushRequired(boolean dirty) { + this.flushRequired = dirty; + } +} diff --git a/src/org/hibernate/event/AutoFlushEventListener.java b/src/org/hibernate/event/AutoFlushEventListener.java new file mode 100644 index 0000000000..b0ff0a9755 --- /dev/null +++ b/src/org/hibernate/event/AutoFlushEventListener.java @@ -0,0 +1,21 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of session auto-flush events. + * + * @author Steve Ebersole + */ +public interface AutoFlushEventListener extends Serializable { + + /** Handle the given auto-flush event. + * + * @param event The auto-flush event to be handled. + * @throws HibernateException + */ + public void onAutoFlush(AutoFlushEvent event) throws HibernateException; +} diff --git a/src/org/hibernate/event/DeleteEvent.java b/src/org/hibernate/event/DeleteEvent.java new file mode 100644 index 0000000000..7de09c6cf1 --- /dev/null +++ b/src/org/hibernate/event/DeleteEvent.java @@ -0,0 +1,59 @@ +//$Id$ +package org.hibernate.event; + + +/** Defines an event class for the deletion of an entity. + * + * @author Steve Ebersole + */ +public class DeleteEvent extends AbstractEvent { + + private Object object; + private String entityName; + private boolean cascadeDeleteEnabled; + + /** + * Constructs a new DeleteEvent instance. + * + * @param object The entity to be deleted. + * @param source The session from which the delete event was generated. 
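Returning to the auto-flush event above: before executing a query, the session raises the event with the query's table spaces and then reads back whether a flush actually happened. A simplified sketch, assuming querySpaces and an EventSource named source are in scope and that the session exposes its EventListeners via getListeners():

    // querySpaces: the tables touched by the about-to-run query
    AutoFlushEvent event = new AutoFlushEvent( querySpaces, source );
    AutoFlushEventListener[] listeners = source.getListeners().getAutoFlushEventListeners();
    for ( int i = 0; i < listeners.length; i++ ) {
        listeners[i].onAutoFlush( event );
    }
    boolean flushed = event.isFlushRequired();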
+ */ + public DeleteEvent(Object object, EventSource source) { + super(source); + if (object == null) { + throw new IllegalArgumentException( + "attempt to create delete event with null entity" + ); + } + this.object = object; + } + + public DeleteEvent(String entityName, Object object, EventSource source) { + this(object, source); + this.entityName = entityName; + } + + public DeleteEvent(String entityName, Object object, boolean isCascadeDeleteEnabled, EventSource source) { + this(object, source); + this.entityName = entityName; + cascadeDeleteEnabled = isCascadeDeleteEnabled; + } + + /** + * Returns the encapsulated entity to be deleted. + * + * @return The entity to be deleted. + */ + public Object getObject() { + return object; + } + + public String getEntityName() { + return entityName; + } + + public boolean isCascadeDeleteEnabled() { + return cascadeDeleteEnabled; + } + +} diff --git a/src/org/hibernate/event/DeleteEventListener.java b/src/org/hibernate/event/DeleteEventListener.java new file mode 100644 index 0000000000..184103ec97 --- /dev/null +++ b/src/org/hibernate/event/DeleteEventListener.java @@ -0,0 +1,24 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; +import java.util.Set; + +/** + * Defines the contract for handling of deletion events generated from a session. + * + * @author Steve Ebersole + */ +public interface DeleteEventListener extends Serializable { + + /** Handle the given delete event. + * + * @param event The delete event to be handled. + * @throws HibernateException + */ + public void onDelete(DeleteEvent event) throws HibernateException; + + public void onDelete(DeleteEvent event, Set transientEntities) throws HibernateException; +} diff --git a/src/org/hibernate/event/DirtyCheckEvent.java b/src/org/hibernate/event/DirtyCheckEvent.java new file mode 100644 index 0000000000..d0d8fdf358 --- /dev/null +++ b/src/org/hibernate/event/DirtyCheckEvent.java @@ -0,0 +1,25 @@ +//$Id$ +package org.hibernate.event; + + +/** Defines an event class for the dirty-checking of a session. + * + * @author Steve Ebersole + */ +public class DirtyCheckEvent extends FlushEvent { + + private boolean dirty; + + public DirtyCheckEvent(EventSource source) { + super(source); + } + + public boolean isDirty() { + return dirty; + } + + public void setDirty(boolean dirty) { + this.dirty = dirty; + } + +} diff --git a/src/org/hibernate/event/DirtyCheckEventListener.java b/src/org/hibernate/event/DirtyCheckEventListener.java new file mode 100644 index 0000000000..5ec0e75e84 --- /dev/null +++ b/src/org/hibernate/event/DirtyCheckEventListener.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of session dirty-check events. + * + * @author Steve Ebersole + */ +public interface DirtyCheckEventListener extends Serializable { + + /** Handle the given dirty-check event. + * + * @param event The dirty-check event to be handled.
+ * @throws HibernateException + */ + public void onDirtyCheck(DirtyCheckEvent event) throws HibernateException; + +} diff --git a/src/org/hibernate/event/EventListeners.java b/src/org/hibernate/event/EventListeners.java new file mode 100644 index 0000000000..43801d5ab8 --- /dev/null +++ b/src/org/hibernate/event/EventListeners.java @@ -0,0 +1,390 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; +import java.lang.reflect.Field; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.hibernate.AssertionFailure; +import org.hibernate.MappingException; +import org.hibernate.cfg.Configuration; +import org.hibernate.event.def.DefaultAutoFlushEventListener; +import org.hibernate.event.def.DefaultDeleteEventListener; +import org.hibernate.event.def.DefaultDirtyCheckEventListener; +import org.hibernate.event.def.DefaultEvictEventListener; +import org.hibernate.event.def.DefaultFlushEntityEventListener; +import org.hibernate.event.def.DefaultFlushEventListener; +import org.hibernate.event.def.DefaultInitializeCollectionEventListener; +import org.hibernate.event.def.DefaultLoadEventListener; +import org.hibernate.event.def.DefaultLockEventListener; +import org.hibernate.event.def.DefaultMergeEventListener; +import org.hibernate.event.def.DefaultPersistEventListener; +import org.hibernate.event.def.DefaultPostLoadEventListener; +import org.hibernate.event.def.DefaultPreLoadEventListener; +import org.hibernate.event.def.DefaultRefreshEventListener; +import org.hibernate.event.def.DefaultReplicateEventListener; +import org.hibernate.event.def.DefaultSaveEventListener; +import org.hibernate.event.def.DefaultSaveOrUpdateCopyEventListener; +import org.hibernate.event.def.DefaultSaveOrUpdateEventListener; +import org.hibernate.event.def.DefaultUpdateEventListener; +import org.hibernate.event.def.DefaultPersistOnFlushEventListener; +import org.hibernate.util.Cloneable; + +/** + * A convenience holder for all defined session event listeners.
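A sketch of wiring a custom listener through this holder (listener class hypothetical; Configuration.setListener taking the same string keys as the type-to-interface map below is assumed, as in Hibernate 3.x):

    // Decorate the shipped default so cascade handling stays intact:
    public class LoggingDeleteEventListener extends DefaultDeleteEventListener {
        public void onDelete(DeleteEvent event) throws HibernateException {
            System.out.println( "deleting: " + event.getObject() );
            super.onDelete( event );
        }
    }

    // Registration at configuration time:
    Configuration cfg = new Configuration().configure();
    cfg.setListener( "delete", new LoggingDeleteEventListener() );
    SessionFactory sessionFactory = cfg.buildSessionFactory();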
+ * + * @author Steve Ebersole + */ +public class EventListeners extends Cloneable implements Serializable { + + private LoadEventListener[] loadEventListeners = { new DefaultLoadEventListener() }; + private SaveOrUpdateEventListener[] saveOrUpdateEventListeners = { new DefaultSaveOrUpdateEventListener() }; + private MergeEventListener[] mergeEventListeners = { new DefaultMergeEventListener() }; + private PersistEventListener[] persistEventListeners = { new DefaultPersistEventListener() }; + private PersistEventListener[] persistOnFlushEventListeners = { new DefaultPersistOnFlushEventListener() }; + private ReplicateEventListener[] replicateEventListeners = { new DefaultReplicateEventListener() }; + private DeleteEventListener[] deleteEventListeners = { new DefaultDeleteEventListener() }; + private AutoFlushEventListener[] autoFlushEventListeners = { new DefaultAutoFlushEventListener() }; + private DirtyCheckEventListener[] dirtyCheckEventListeners = { new DefaultDirtyCheckEventListener() }; + private FlushEventListener[] flushEventListeners = { new DefaultFlushEventListener() }; + private EvictEventListener[] evictEventListeners = { new DefaultEvictEventListener() }; + private LockEventListener[] lockEventListeners = { new DefaultLockEventListener() }; + private RefreshEventListener[] refreshEventListeners = { new DefaultRefreshEventListener() }; + private FlushEntityEventListener[] flushEntityEventListeners = { new DefaultFlushEntityEventListener() }; + private InitializeCollectionEventListener[] initializeCollectionEventListeners = + { new DefaultInitializeCollectionEventListener() }; + + private PostLoadEventListener[] postLoadEventListeners = { new DefaultPostLoadEventListener() }; + private PreLoadEventListener[] preLoadEventListeners = { new DefaultPreLoadEventListener() }; + + private PreDeleteEventListener[] preDeleteEventListeners = {}; + private PreUpdateEventListener[] preUpdateEventListeners = {}; + private PreInsertEventListener[] preInsertEventListeners = {}; + private PostDeleteEventListener[] postDeleteEventListeners = {}; + private PostUpdateEventListener[] postUpdateEventListeners = {}; + private PostInsertEventListener[] postInsertEventListeners = {}; + private PostDeleteEventListener[] postCommitDeleteEventListeners = {}; + private PostUpdateEventListener[] postCommitUpdateEventListeners = {}; + private PostInsertEventListener[] postCommitInsertEventListeners = {}; + + private SaveOrUpdateEventListener[] saveEventListeners = { new DefaultSaveEventListener() }; + private SaveOrUpdateEventListener[] updateEventListeners = { new DefaultUpdateEventListener() }; + private MergeEventListener[] saveOrUpdateCopyEventListeners = { new DefaultSaveOrUpdateCopyEventListener() };//saveOrUpdateCopy() is deprecated! 
+ + private static Map eventInterfaceFromType; + + static { + eventInterfaceFromType = new HashMap(); + + eventInterfaceFromType.put("auto-flush", AutoFlushEventListener.class); + eventInterfaceFromType.put("merge", MergeEventListener.class); + eventInterfaceFromType.put("create", PersistEventListener.class); + eventInterfaceFromType.put("create-onflush", PersistEventListener.class); + eventInterfaceFromType.put("delete", DeleteEventListener.class); + eventInterfaceFromType.put("dirty-check", DirtyCheckEventListener.class); + eventInterfaceFromType.put("evict", EvictEventListener.class); + eventInterfaceFromType.put("flush", FlushEventListener.class); + eventInterfaceFromType.put("flush-entity", FlushEntityEventListener.class); + eventInterfaceFromType.put("load", LoadEventListener.class); + eventInterfaceFromType.put("load-collection", InitializeCollectionEventListener.class); + eventInterfaceFromType.put("lock", LockEventListener.class); + eventInterfaceFromType.put("refresh", RefreshEventListener.class); + eventInterfaceFromType.put("replicate", ReplicateEventListener.class); + eventInterfaceFromType.put("save-update", SaveOrUpdateEventListener.class); + eventInterfaceFromType.put("save", SaveOrUpdateEventListener.class); + eventInterfaceFromType.put("update", SaveOrUpdateEventListener.class); + eventInterfaceFromType.put("pre-load", PreLoadEventListener.class); + eventInterfaceFromType.put("pre-update", PreUpdateEventListener.class); + eventInterfaceFromType.put("pre-delete", PreDeleteEventListener.class); + eventInterfaceFromType.put("pre-insert", PreInsertEventListener.class); + eventInterfaceFromType.put("post-load", PostLoadEventListener.class); + eventInterfaceFromType.put("post-update", PostUpdateEventListener.class); + eventInterfaceFromType.put("post-delete", PostDeleteEventListener.class); + eventInterfaceFromType.put("post-insert", PostInsertEventListener.class); + eventInterfaceFromType.put("post-commit-update", PostUpdateEventListener.class); + eventInterfaceFromType.put("post-commit-delete", PostDeleteEventListener.class); + eventInterfaceFromType.put("post-commit-insert", PostInsertEventListener.class); + eventInterfaceFromType = Collections.unmodifiableMap( eventInterfaceFromType ); + } + + public Class getListenerClassFor(String type) { + Class clazz = (Class) eventInterfaceFromType.get(type); + + if (clazz == null) { + throw new MappingException("Unrecognized listener type [" + type + "]"); + } + + return clazz; + } + + public LoadEventListener[] getLoadEventListeners() { + return loadEventListeners; + } + + public void setLoadEventListeners(LoadEventListener[] loadEventListener) { + this.loadEventListeners = loadEventListener; + } + + public ReplicateEventListener[] getReplicateEventListeners() { + return replicateEventListeners; + } + + public void setReplicateEventListeners(ReplicateEventListener[] replicateEventListener) { + this.replicateEventListeners = replicateEventListener; + } + + public DeleteEventListener[] getDeleteEventListeners() { + return deleteEventListeners; + } + + public void setDeleteEventListeners(DeleteEventListener[] deleteEventListener) { + this.deleteEventListeners = deleteEventListener; + } + + public AutoFlushEventListener[] getAutoFlushEventListeners() { + return autoFlushEventListeners; + } + + public void setAutoFlushEventListeners(AutoFlushEventListener[] autoFlushEventListener) { + this.autoFlushEventListeners = autoFlushEventListener; + } + + public DirtyCheckEventListener[] getDirtyCheckEventListeners() { + return 
dirtyCheckEventListeners; + } + + public void setDirtyCheckEventListeners(DirtyCheckEventListener[] dirtyCheckEventListener) { + this.dirtyCheckEventListeners = dirtyCheckEventListener; + } + + public FlushEventListener[] getFlushEventListeners() { + return flushEventListeners; + } + + public void setFlushEventListeners(FlushEventListener[] flushEventListener) { + this.flushEventListeners = flushEventListener; + } + + public EvictEventListener[] getEvictEventListeners() { + return evictEventListeners; + } + + public void setEvictEventListeners(EvictEventListener[] evictEventListener) { + this.evictEventListeners = evictEventListener; + } + + public LockEventListener[] getLockEventListeners() { + return lockEventListeners; + } + + public void setLockEventListeners(LockEventListener[] lockEventListener) { + this.lockEventListeners = lockEventListener; + } + + public RefreshEventListener[] getRefreshEventListeners() { + return refreshEventListeners; + } + + public void setRefreshEventListeners(RefreshEventListener[] refreshEventListener) { + this.refreshEventListeners = refreshEventListener; + } + + public InitializeCollectionEventListener[] getInitializeCollectionEventListeners() { + return initializeCollectionEventListeners; + } + + public void setInitializeCollectionEventListeners(InitializeCollectionEventListener[] initializeCollectionEventListener) { + this.initializeCollectionEventListeners = initializeCollectionEventListener; + } + + public FlushEntityEventListener[] getFlushEntityEventListeners() { + return flushEntityEventListeners; + } + + public void setFlushEntityEventListeners(FlushEntityEventListener[] flushEntityEventListener) { + this.flushEntityEventListeners = flushEntityEventListener; + } + + public SaveOrUpdateEventListener[] getSaveOrUpdateEventListeners() { + return saveOrUpdateEventListeners; + } + + public void setSaveOrUpdateEventListeners(SaveOrUpdateEventListener[] saveOrUpdateEventListener) { + this.saveOrUpdateEventListeners = saveOrUpdateEventListener; + } + + public MergeEventListener[] getMergeEventListeners() { + return mergeEventListeners; + } + + public void setMergeEventListeners(MergeEventListener[] mergeEventListener) { + this.mergeEventListeners = mergeEventListener; + } + + public PersistEventListener[] getPersistEventListeners() { + return persistEventListeners; + } + + public void setPersistEventListeners(PersistEventListener[] createEventListener) { + this.persistEventListeners = createEventListener; + } + + public PersistEventListener[] getPersistOnFlushEventListeners() { + return persistOnFlushEventListeners; + } + + public void setPersistOnFlushEventListeners(PersistEventListener[] createEventListener) { + this.persistOnFlushEventListeners = createEventListener; + } + + public MergeEventListener[] getSaveOrUpdateCopyEventListeners() { + return saveOrUpdateCopyEventListeners; + } + + public void setSaveOrUpdateCopyEventListeners(MergeEventListener[] saveOrUpdateCopyEventListener) { + this.saveOrUpdateCopyEventListeners = saveOrUpdateCopyEventListener; + } + + public SaveOrUpdateEventListener[] getSaveEventListeners() { + return saveEventListeners; + } + + public void setSaveEventListeners(SaveOrUpdateEventListener[] saveEventListener) { + this.saveEventListeners = saveEventListener; + } + + public SaveOrUpdateEventListener[] getUpdateEventListeners() { + return updateEventListeners; + } + + public void setUpdateEventListeners(SaveOrUpdateEventListener[] updateEventListener) { + this.updateEventListeners = updateEventListener; + } + + public 
PostLoadEventListener[] getPostLoadEventListeners() { + return postLoadEventListeners; + } + + public void setPostLoadEventListeners(PostLoadEventListener[] postLoadEventListener) { + this.postLoadEventListeners = postLoadEventListener; + } + + public PreLoadEventListener[] getPreLoadEventListeners() { + return preLoadEventListeners; + } + + public void setPreLoadEventListeners(PreLoadEventListener[] preLoadEventListener) { + this.preLoadEventListeners = preLoadEventListener; + } + + public PostDeleteEventListener[] getPostDeleteEventListeners() { + return postDeleteEventListeners; + } + + public PostInsertEventListener[] getPostInsertEventListeners() { + return postInsertEventListeners; + } + + public PostUpdateEventListener[] getPostUpdateEventListeners() { + return postUpdateEventListeners; + } + + public void setPostDeleteEventListeners(PostDeleteEventListener[] postDeleteEventListener) { + this.postDeleteEventListeners = postDeleteEventListener; + } + + public void setPostInsertEventListeners(PostInsertEventListener[] postInsertEventListener) { + this.postInsertEventListeners = postInsertEventListener; + } + + public void setPostUpdateEventListeners(PostUpdateEventListener[] postUpdateEventListener) { + this.postUpdateEventListeners = postUpdateEventListener; + } + + public PreDeleteEventListener[] getPreDeleteEventListeners() { + return preDeleteEventListeners; + } + + public void setPreDeleteEventListeners(PreDeleteEventListener[] preDeleteEventListener) { + this.preDeleteEventListeners = preDeleteEventListener; + } + + public PreInsertEventListener[] getPreInsertEventListeners() { + return preInsertEventListeners; + } + + public void setPreInsertEventListeners(PreInsertEventListener[] preInsertEventListener) { + this.preInsertEventListeners = preInsertEventListener; + } + + public PreUpdateEventListener[] getPreUpdateEventListeners() { + return preUpdateEventListeners; + } + + public void setPreUpdateEventListeners(PreUpdateEventListener[] preUpdateEventListener) { + this.preUpdateEventListeners = preUpdateEventListener; + } + + /** + * Call initialize() on any listeners that implement + * Initializable. 
+ * @see Initializable + */ + public void initializeListeners(Configuration cfg) { + Field[] fields = getClass().getDeclaredFields(); + for ( int i = 0; i < fields.length; i++ ) { + Object[] listeners; + try { + Object listener = fields[i].get(this); + if (listener instanceof Object[]) { + listeners = (Object[]) listener; + } + else { + continue; + } + + } + catch (Exception e) { + throw new AssertionFailure("could not init listeners"); + } + int length = listeners.length; + for (int index = 0 ; index < length ; index++) { + Object listener = listeners[index]; + if (listener instanceof Initializable ) { + ( (Initializable) listener ).initialize(cfg); + } + } + + } + } + + public PostDeleteEventListener[] getPostCommitDeleteEventListeners() { + return postCommitDeleteEventListeners; + } + + public void setPostCommitDeleteEventListeners( + PostDeleteEventListener[] postCommitDeleteEventListeners) { + this.postCommitDeleteEventListeners = postCommitDeleteEventListeners; + } + + public PostInsertEventListener[] getPostCommitInsertEventListeners() { + return postCommitInsertEventListeners; + } + + public void setPostCommitInsertEventListeners( + PostInsertEventListener[] postCommitInsertEventListeners) { + this.postCommitInsertEventListeners = postCommitInsertEventListeners; + } + + public PostUpdateEventListener[] getPostCommitUpdateEventListeners() { + return postCommitUpdateEventListeners; + } + + public void setPostCommitUpdateEventListeners( + PostUpdateEventListener[] postCommitUpdateEventListeners) { + this.postCommitUpdateEventListeners = postCommitUpdateEventListeners; + } + +} diff --git a/src/org/hibernate/event/EventSource.java b/src/org/hibernate/event/EventSource.java new file mode 100755 index 0000000000..593591d47d --- /dev/null +++ b/src/org/hibernate/event/EventSource.java @@ -0,0 +1,63 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; + +import org.hibernate.HibernateException; +import org.hibernate.Session; +import org.hibernate.engine.ActionQueue; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.EntityPersister; + +/** + * @author Gavin King + */ +public interface EventSource extends SessionImplementor, Session { + + /** + * Get the ActionQueue for this session + */ + public ActionQueue getActionQueue(); + + /** + * Instantiate an entity instance, using either an interceptor, + * or the given persister + */ + public Object instantiate(EntityPersister persister, Serializable id) throws HibernateException; + + /** + * Force an immediate flush + */ + public void forceFlush(EntityEntry e) throws HibernateException; + + /** + * Cascade merge an entity instance + */ + public void merge(String entityName, Object object, Map copiedAlready) throws HibernateException; + /** + * Cascade persist an entity instance + */ + public void persist(String entityName, Object object, Map createdAlready) throws HibernateException; + + /** + * Cascade persist an entity instance during the flush process + */ + public void persistOnFlush(String entityName, Object object, Map copiedAlready); + /** + * Cascade refresh an entity instance + */ + public void refresh(Object object, Map refreshedAlready) throws HibernateException; + /** + * Cascade copy an entity instance + */ + public void saveOrUpdateCopy(String entityName, Object object, Map copiedAlready) throws HibernateException; + + /** + * Cascade delete an entity instance + */ + public void
delete(String entityName, Object child, boolean isCascadeDeleteEnabled, Set transientEntities); + +} diff --git a/src/org/hibernate/event/EvictEvent.java b/src/org/hibernate/event/EvictEvent.java new file mode 100644 index 0000000000..c51a4cd508 --- /dev/null +++ b/src/org/hibernate/event/EvictEvent.java @@ -0,0 +1,26 @@ +//$Id$ +package org.hibernate.event; + + +/** + * Defines an event class for the evicting of an entity. + * + * @author Steve Ebersole + */ +public class EvictEvent extends AbstractEvent { + + private Object object; + + public EvictEvent(Object object, EventSource source) { + super(source); + this.object = object; + } + + public Object getObject() { + return object; + } + + public void setObject(Object object) { + this.object = object; + } +} diff --git a/src/org/hibernate/event/EvictEventListener.java b/src/org/hibernate/event/EvictEventListener.java new file mode 100644 index 0000000000..7f60d5b5c8 --- /dev/null +++ b/src/org/hibernate/event/EvictEventListener.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of evict events generated from a session. + * + * @author Steve Ebersole + */ +public interface EvictEventListener extends Serializable { + + /** + * Handle the given evict event. + * + * @param event The evict event to be handled. + * @throws HibernateException + */ + public void onEvict(EvictEvent event) throws HibernateException; +} diff --git a/src/org/hibernate/event/FlushEntityEvent.java b/src/org/hibernate/event/FlushEntityEvent.java new file mode 100755 index 0000000000..c4c608e2f6 --- /dev/null +++ b/src/org/hibernate/event/FlushEntityEvent.java @@ -0,0 +1,71 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.engine.EntityEntry; + +/** + * @author Gavin King + */ +public class FlushEntityEvent extends AbstractEvent { + + private Object entity; + private Object[] propertyValues; + private Object[] databaseSnapshot; + private int[] dirtyProperties; + private boolean hasDirtyCollection; + private boolean dirtyCheckPossible; + private boolean dirtyCheckHandledByInterceptor; + private EntityEntry entityEntry; + + public FlushEntityEvent(EventSource source, Object entity, EntityEntry entry) { + super(source); + this.entity = entity; + this.entityEntry = entry; + } + + public EntityEntry getEntityEntry() { + return entityEntry; + } + public Object[] getDatabaseSnapshot() { + return databaseSnapshot; + } + public void setDatabaseSnapshot(Object[] databaseSnapshot) { + this.databaseSnapshot = databaseSnapshot; + } + public boolean hasDatabaseSnapshot() { + return databaseSnapshot!=null; + } + public boolean isDirtyCheckHandledByInterceptor() { + return dirtyCheckHandledByInterceptor; + } + public void setDirtyCheckHandledByInterceptor(boolean dirtyCheckHandledByInterceptor) { + this.dirtyCheckHandledByInterceptor = dirtyCheckHandledByInterceptor; + } + public boolean isDirtyCheckPossible() { + return dirtyCheckPossible; + } + public void setDirtyCheckPossible(boolean dirtyCheckPossible) { + this.dirtyCheckPossible = dirtyCheckPossible; + } + public int[] getDirtyProperties() { + return dirtyProperties; + } + public void setDirtyProperties(int[] dirtyProperties) { + this.dirtyProperties = dirtyProperties; + } + public boolean hasDirtyCollection() { + return hasDirtyCollection; + } + public void setHasDirtyCollection(boolean hasDirtyCollection) { + this.hasDirtyCollection = hasDirtyCollection; + } + public Object[] 
getPropertyValues() { + return propertyValues; + } + public void setPropertyValues(Object[] propertyValues) { + this.propertyValues = propertyValues; + } + public Object getEntity() { + return entity; + } +} diff --git a/src/org/hibernate/event/FlushEntityEventListener.java b/src/org/hibernate/event/FlushEntityEventListener.java new file mode 100755 index 0000000000..2279ddf839 --- /dev/null +++ b/src/org/hibernate/event/FlushEntityEventListener.java @@ -0,0 +1,13 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.HibernateException; + +/** + * @author Gavin King + */ +public interface FlushEntityEventListener extends Serializable { + public void onFlushEntity(FlushEntityEvent event) throws HibernateException; +} diff --git a/src/org/hibernate/event/FlushEvent.java b/src/org/hibernate/event/FlushEvent.java new file mode 100644 index 0000000000..06c080659b --- /dev/null +++ b/src/org/hibernate/event/FlushEvent.java @@ -0,0 +1,16 @@ +//$Id$ +package org.hibernate.event; + + +/** + * Defines an event class for the flushing of a session. + * + * @author Steve Ebersole + */ +public class FlushEvent extends AbstractEvent { + + public FlushEvent(EventSource source) { + super(source); + } + +} diff --git a/src/org/hibernate/event/FlushEventListener.java b/src/org/hibernate/event/FlushEventListener.java new file mode 100644 index 0000000000..d2d1ff7218 --- /dev/null +++ b/src/org/hibernate/event/FlushEventListener.java @@ -0,0 +1,21 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of session flush events. + * + * @author Steve Ebersole + */ +public interface FlushEventListener extends Serializable { + + /** Handle the given flush event. + * + * @param event The flush event to be handled. + * @throws HibernateException + */ + public void onFlush(FlushEvent event) throws HibernateException; +} diff --git a/src/org/hibernate/event/Initializable.java b/src/org/hibernate/event/Initializable.java new file mode 100755 index 0000000000..2aea71b75f --- /dev/null +++ b/src/org/hibernate/event/Initializable.java @@ -0,0 +1,13 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.cfg.Configuration; + +/** + * An event listener that requires access to mappings to + * initialize state at initialization time. 
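A sketch of a listener using this hook (class name and auditing logic hypothetical, java.util imports assumed); EventListeners.initializeListeners(), shown earlier, finds such listeners reflectively and calls initialize() once at configuration time:

    public class AuditInitializingListener implements PostInsertEventListener, Initializable {
        private final Set auditedEntities = new HashSet();

        public void initialize(Configuration cfg) {
            // walk cfg.getClassMappings() here and record which entities to audit
        }

        public void onPostInsert(PostInsertEvent event) {
            if ( auditedEntities.contains( event.getPersister().getEntityName() ) ) {
                // write an audit row using event.getId() and event.getState()
            }
        }
    }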
+ * @author Gavin King + */ +public interface Initializable { + public void initialize(Configuration cfg); +} diff --git a/src/org/hibernate/event/InitializeCollectionEvent.java b/src/org/hibernate/event/InitializeCollectionEvent.java new file mode 100755 index 0000000000..2805f3c49d --- /dev/null +++ b/src/org/hibernate/event/InitializeCollectionEvent.java @@ -0,0 +1,24 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.collection.PersistentCollection; + +/** + * An event that occurs when a collection wants to be + * initialized + * + * @author Gavin King + */ +public class InitializeCollectionEvent extends AbstractEvent { + + private final PersistentCollection collection; + + public InitializeCollectionEvent(PersistentCollection collection, EventSource source) { + super(source); + this.collection = collection; + } + + public PersistentCollection getCollection() { + return collection; + } +} diff --git a/src/org/hibernate/event/InitializeCollectionEventListener.java b/src/org/hibernate/event/InitializeCollectionEventListener.java new file mode 100755 index 0000000000..a885077f7c --- /dev/null +++ b/src/org/hibernate/event/InitializeCollectionEventListener.java @@ -0,0 +1,18 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of collection initialization events + * generated by a session. + * + * @author Gavin King + */ +public interface InitializeCollectionEventListener extends Serializable { + + public void onInitializeCollection(InitializeCollectionEvent event) throws HibernateException; + +} diff --git a/src/org/hibernate/event/LoadEvent.java b/src/org/hibernate/event/LoadEvent.java new file mode 100644 index 0000000000..b6fd8115f4 --- /dev/null +++ b/src/org/hibernate/event/LoadEvent.java @@ -0,0 +1,107 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.LockMode; + +/** + * Defines an event class for the loading of an entity. 
+ * + * @author Steve Ebersole + */ +public class LoadEvent extends AbstractEvent { + + public static final LockMode DEFAULT_LOCK_MODE = LockMode.NONE; + + private Serializable entityId; + private String entityClassName; + private Object instanceToLoad; + private LockMode lockMode; + private boolean isAssociationFetch; + private Object result; + + public LoadEvent(Serializable entityId, Object instanceToLoad, EventSource source) { + this(entityId, null, instanceToLoad, null, false, source); + } + + public LoadEvent(Serializable entityId, String entityClassName, LockMode lockMode, EventSource source) { + this(entityId, entityClassName, null, lockMode, false, source); + } + + public LoadEvent(Serializable entityId, String entityClassName, boolean isAssociationFetch, EventSource source) { + this(entityId, entityClassName, null, null, isAssociationFetch, source); + } + + public boolean isAssociationFetch() { + return isAssociationFetch; + } + + private LoadEvent( + Serializable entityId, + String entityClassName, + Object instanceToLoad, + LockMode lockMode, + boolean isAssociationFetch, + EventSource source) { + + super(source); + + if ( entityId == null ) { + throw new IllegalArgumentException("id to load is required for loading"); + } + + if ( lockMode == LockMode.WRITE ) { + throw new IllegalArgumentException("Invalid lock mode for loading"); + } + else if ( lockMode == null ) { + lockMode = DEFAULT_LOCK_MODE; + } + + this.entityId = entityId; + this.entityClassName = entityClassName; + this.instanceToLoad = instanceToLoad; + this.lockMode = lockMode; + this.isAssociationFetch = isAssociationFetch; + } + + public Serializable getEntityId() { + return entityId; + } + + public void setEntityId(Serializable entityId) { + this.entityId = entityId; + } + + public String getEntityClassName() { + return entityClassName; + } + + public void setEntityClassName(String entityClassName) { + this.entityClassName = entityClassName; + } + + public Object getInstanceToLoad() { + return instanceToLoad; + } + + public void setInstanceToLoad(Object instanceToLoad) { + this.instanceToLoad = instanceToLoad; + } + + public LockMode getLockMode() { + return lockMode; + } + + public void setLockMode(LockMode lockMode) { + this.lockMode = lockMode; + } + + public Object getResult() { + return result; + } + + public void setResult(Object result) { + this.result = result; + } +} diff --git a/src/org/hibernate/event/LoadEventListener.java b/src/org/hibernate/event/LoadEventListener.java new file mode 100644 index 0000000000..7bab91be54 --- /dev/null +++ b/src/org/hibernate/event/LoadEventListener.java @@ -0,0 +1,122 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of load events generated from a session. + * + * @author Steve Ebersole + */ +public interface LoadEventListener extends Serializable { + + /** + * Handle the given load event. + * + * @param event The load event to be handled. + * @return The result (i.e., the loaded entity). 
+ * @throws HibernateException + */ + public void onLoad(LoadEvent event, LoadType loadType) throws HibernateException; + + public static final LoadType RELOAD = new LoadType("GET") + .setAllowNulls(false) + .setAllowProxyCreation(false) + .setCheckDeleted(true) + .setNakedEntityReturned(false); + + public static final LoadType GET = new LoadType("GET") + .setAllowNulls(true) + .setAllowProxyCreation(false) + .setCheckDeleted(true) + .setNakedEntityReturned(false); + + public static final LoadType LOAD = new LoadType("LOAD") + .setAllowNulls(false) + .setAllowProxyCreation(true) + .setCheckDeleted(true) + .setNakedEntityReturned(false); + + public static final LoadType IMMEDIATE_LOAD = new LoadType("IMMEDIATE_LOAD") + .setAllowNulls(true) + .setAllowProxyCreation(false) + .setCheckDeleted(false) + .setNakedEntityReturned(true); + + public static final LoadType INTERNAL_LOAD_EAGER = new LoadType("INTERNAL_LOAD_EAGER") + .setAllowNulls(false) + .setAllowProxyCreation(false) + .setCheckDeleted(false) + .setNakedEntityReturned(false); + + public static final LoadType INTERNAL_LOAD_LAZY = new LoadType("INTERNAL_LOAD_LAZY") + .setAllowNulls(false) + .setAllowProxyCreation(true) + .setCheckDeleted(false) + .setNakedEntityReturned(false); + + public static final LoadType INTERNAL_LOAD_NULLABLE = new LoadType("INTERNAL_LOAD_NULLABLE") + .setAllowNulls(true) + .setAllowProxyCreation(false) + .setCheckDeleted(false) + .setNakedEntityReturned(false); + + public static final class LoadType { + private String name; + + private boolean nakedEntityReturned; + private boolean allowNulls; + private boolean checkDeleted; + private boolean allowProxyCreation; + + private LoadType(String name) { + this.name = name; + } + + public boolean isAllowNulls() { + return allowNulls; + } + + private LoadType setAllowNulls(boolean allowNulls) { + this.allowNulls = allowNulls; + return this; + } + + public boolean isNakedEntityReturned() { + return nakedEntityReturned; + } + + private LoadType setNakedEntityReturned(boolean immediateLoad) { + this.nakedEntityReturned = immediateLoad; + return this; + } + + public boolean isCheckDeleted() { + return checkDeleted; + } + + private LoadType setCheckDeleted(boolean checkDeleted) { + this.checkDeleted = checkDeleted; + return this; + } + + public boolean isAllowProxyCreation() { + return allowProxyCreation; + } + + private LoadType setAllowProxyCreation(boolean allowProxyCreation) { + this.allowProxyCreation = allowProxyCreation; + return this; + } + + public String getName() { + return name; + } + + public String toString() { + return name; + } + } +} diff --git a/src/org/hibernate/event/LockEvent.java b/src/org/hibernate/event/LockEvent.java new file mode 100644 index 0000000000..90ada6adc8 --- /dev/null +++ b/src/org/hibernate/event/LockEvent.java @@ -0,0 +1,52 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.LockMode; + +/** + * Defines an event class for the locking of an entity. 
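Returning to the load types above, a get()-style lookup maps onto this contract roughly as follows (simplified sketch; a Serializable id and an EventSource named source are assumed in scope, and the session is assumed to expose its EventListeners via getListeners()):

    LoadEvent event = new LoadEvent( id, "com.example.Order", LockMode.NONE, source );
    LoadEventListener[] listeners = source.getListeners().getLoadEventListeners();
    for ( int i = 0; i < listeners.length; i++ ) {
        // GET tolerates a null result; LOAD would create a proxy or fail instead
        listeners[i].onLoad( event, LoadEventListener.GET );
    }
    Object result = event.getResult();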
+ * + * @author Steve Ebersole + */ +public class LockEvent extends AbstractEvent { + + private Object object; + private LockMode lockMode; + private String entityName; + + public LockEvent(String entityName, Object original, LockMode lockMode, EventSource source) { + this(original, lockMode, source); + this.entityName = entityName; + } + + public LockEvent(Object object, LockMode lockMode, EventSource source) { + super(source); + this.object = object; + this.lockMode = lockMode; + } + + public Object getObject() { + return object; + } + + public void setObject(Object object) { + this.object = object; + } + + public LockMode getLockMode() { + return lockMode; + } + + public void setLockMode(LockMode lockMode) { + this.lockMode = lockMode; + } + + public String getEntityName() { + return entityName; + } + + public void setEntityName(String entityName) { + this.entityName = entityName; + } + +} diff --git a/src/org/hibernate/event/LockEventListener.java b/src/org/hibernate/event/LockEventListener.java new file mode 100644 index 0000000000..daa64a1000 --- /dev/null +++ b/src/org/hibernate/event/LockEventListener.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of lock events generated from a session. + * + * @author Steve Ebersole + */ +public interface LockEventListener extends Serializable { + + /** Handle the given lock event. + * + * @param event The lock event to be handled. + * @throws HibernateException + */ + public void onLock(LockEvent event) throws HibernateException; +} + diff --git a/src/org/hibernate/event/MergeEvent.java b/src/org/hibernate/event/MergeEvent.java new file mode 100755 index 0000000000..03af0a1f1b --- /dev/null +++ b/src/org/hibernate/event/MergeEvent.java @@ -0,0 +1,82 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * An event class for merge() and saveOrUpdateCopy() + * + * @author Gavin King + */ +public class MergeEvent extends AbstractEvent { + + private Object original; + private Serializable requestedId; + private String entityName; + private Object entity; + private Object result; + + public MergeEvent(String entityName, Object original, EventSource source) { + this(original, source); + this.entityName = entityName; + } + + public MergeEvent(String entityName, Object original, Serializable id, EventSource source) { + this(entityName, original, source); + this.requestedId = id; + if ( requestedId == null ) { + throw new IllegalArgumentException( + "attempt to create merge event with null identifier" + ); + } + } + + public MergeEvent(Object object, EventSource source) { + super(source); + if ( object == null ) { + throw new IllegalArgumentException( + "attempt to create merge event with null entity" + ); + } + this.original = object; + } + + public Object getOriginal() { + return original; + } + + public void setOriginal(Object object) { + this.original = object; + } + + public Serializable getRequestedId() { + return requestedId; + } + + public void setRequestedId(Serializable requestedId) { + this.requestedId = requestedId; + } + + public String getEntityName() { + return entityName; + } + + public void setEntityName(String entityName) { + this.entityName = entityName; + } + + public Object getEntity() { + return entity; + } + public void setEntity(Object entity) { + this.entity = entity; + } + + public Object getResult() { + return result; + } + + public void setResult(Object result) { + 
this.result = result; + } +} diff --git a/src/org/hibernate/event/MergeEventListener.java b/src/org/hibernate/event/MergeEventListener.java new file mode 100755 index 0000000000..a226c356a8 --- /dev/null +++ b/src/org/hibernate/event/MergeEventListener.java @@ -0,0 +1,32 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; +import java.util.Map; + +import org.hibernate.HibernateException; + +/** + * Defines the contract for handling of merge events generated from a session. + * + * @author Gavin King + */ +public interface MergeEventListener extends Serializable { + + /** + * Handle the given merge event. + * + * @param event The merge event to be handled. + * @throws HibernateException + */ + public void onMerge(MergeEvent event) throws HibernateException; + + /** + * Handle the given merge event. + * + * @param event The merge event to be handled. + * @throws HibernateException + */ + public void onMerge(MergeEvent event, Map copiedAlready) throws HibernateException; + +} diff --git a/src/org/hibernate/event/PersistEvent.java b/src/org/hibernate/event/PersistEvent.java new file mode 100755 index 0000000000..2d38e36b82 --- /dev/null +++ b/src/org/hibernate/event/PersistEvent.java @@ -0,0 +1,47 @@ +//$Id$ +package org.hibernate.event; + + + +/** + * An event class for persist() + * + * @author Gavin King + */ +public class PersistEvent extends AbstractEvent { + + private Object object; + private String entityName; + + public PersistEvent(String entityName, Object original, EventSource source) { + this(original, source); + this.entityName = entityName; + } + + public PersistEvent(Object object, EventSource source) { + super(source); + if ( object == null ) { + throw new IllegalArgumentException( + "attempt to create create event with null entity" + ); + } + this.object = object; + } + + public Object getObject() { + return object; + } + + public void setObject(Object object) { + this.object = object; + } + + public String getEntityName() { + return entityName; + } + + public void setEntityName(String entityName) { + this.entityName = entityName; + } + +} diff --git a/src/org/hibernate/event/PersistEventListener.java b/src/org/hibernate/event/PersistEventListener.java new file mode 100755 index 0000000000..92b7aa4236 --- /dev/null +++ b/src/org/hibernate/event/PersistEventListener.java @@ -0,0 +1,32 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; +import java.util.Map; + +import org.hibernate.HibernateException; + +/** + * Defines the contract for handling of create events generated from a session. + * + * @author Gavin King + */ +public interface PersistEventListener extends Serializable { + + /** + * Handle the given create event. + * + * @param event The create event to be handled. + * @throws HibernateException + */ + public void onPersist(PersistEvent event) throws HibernateException; + + /** + * Handle the given create event. + * + * @param event The create event to be handled. 
+ * @throws HibernateException + */ + public void onPersist(PersistEvent event, Map createdAlready) throws HibernateException; + +} diff --git a/src/org/hibernate/event/PostDeleteEvent.java b/src/org/hibernate/event/PostDeleteEvent.java new file mode 100644 index 0000000000..b88714eb2c --- /dev/null +++ b/src/org/hibernate/event/PostDeleteEvent.java @@ -0,0 +1,45 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; + +/** + * Occurs after deleting an item from the datastore + * + * @author Gavin King + */ +public class PostDeleteEvent extends AbstractEvent { + private Object entity; + private EntityPersister persister; + private Serializable id; + private Object[] deletedState; + + public PostDeleteEvent( + Object entity, + Serializable id, + Object[] deletedState, + EntityPersister persister, + EventSource source + ) { + super(source); + this.entity = entity; + this.id = id; + this.persister = persister; + this.deletedState = deletedState; + } + + public Serializable getId() { + return id; + } + public EntityPersister getPersister() { + return persister; + } + public Object getEntity() { + return entity; + } + public Object[] getDeletedState() { + return deletedState; + } +} diff --git a/src/org/hibernate/event/PostDeleteEventListener.java b/src/org/hibernate/event/PostDeleteEventListener.java new file mode 100644 index 0000000000..08b5077797 --- /dev/null +++ b/src/org/hibernate/event/PostDeleteEventListener.java @@ -0,0 +1,13 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Called after deleting an item from the datastore + * + * @author Gavin King + */ +public interface PostDeleteEventListener extends Serializable { + public void onPostDelete(PostDeleteEvent event); +} diff --git a/src/org/hibernate/event/PostInsertEvent.java b/src/org/hibernate/event/PostInsertEvent.java new file mode 100755 index 0000000000..1343086121 --- /dev/null +++ b/src/org/hibernate/event/PostInsertEvent.java @@ -0,0 +1,45 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; + +/** + * Occurs after inserting an item in the datastore + * + * @author Gavin King + */ +public class PostInsertEvent extends AbstractEvent { + private Object entity; + private EntityPersister persister; + private Object[] state; + private Serializable id; + + public PostInsertEvent( + Object entity, + Serializable id, + Object[] state, + EntityPersister persister, + EventSource source + ) { + super(source); + this.entity = entity; + this.id = id; + this.state = state; + this.persister = persister; + } + + public Object getEntity() { + return entity; + } + public Serializable getId() { + return id; + } + public EntityPersister getPersister() { + return persister; + } + public Object[] getState() { + return state; + } +} diff --git a/src/org/hibernate/event/PostInsertEventListener.java b/src/org/hibernate/event/PostInsertEventListener.java new file mode 100755 index 0000000000..b8c98c5685 --- /dev/null +++ b/src/org/hibernate/event/PostInsertEventListener.java @@ -0,0 +1,13 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Called after inserting an item in the datastore + * + * @author Gavin King + */ +public interface PostInsertEventListener extends Serializable { + public void onPostInsert(PostInsertEvent event); +} diff --git a/src/org/hibernate/event/PostLoadEvent.java
b/src/org/hibernate/event/PostLoadEvent.java new file mode 100644 index 0000000000..d6afa41b23 --- /dev/null +++ b/src/org/hibernate/event/PostLoadEvent.java @@ -0,0 +1,49 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; + +/** + * Occurs after an entity instance is fully loaded. + * + * @author Kabir Khan, Gavin King + */ +public class PostLoadEvent extends AbstractEvent { + private Object entity; + private Serializable id; + private EntityPersister persister; + + public PostLoadEvent(EventSource session) { + super(session); + } + + public Object getEntity() { + return entity; + } + + public EntityPersister getPersister() { + return persister; + } + + public Serializable getId() { + return id; + } + + public PostLoadEvent setEntity(Object entity) { + this.entity = entity; + return this; + } + + public PostLoadEvent setId(Serializable id) { + this.id = id; + return this; + } + + public PostLoadEvent setPersister(EntityPersister persister) { + this.persister = persister; + return this; + } + +} diff --git a/src/org/hibernate/event/PostLoadEventListener.java b/src/org/hibernate/event/PostLoadEventListener.java new file mode 100644 index 0000000000..7054d8e6b0 --- /dev/null +++ b/src/org/hibernate/event/PostLoadEventListener.java @@ -0,0 +1,13 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Occurs after an entity instance is fully loaded. + * + * @author Kabir Khan + */ +public interface PostLoadEventListener extends Serializable { + public void onPostLoad(PostLoadEvent event); +} diff --git a/src/org/hibernate/event/PostUpdateEvent.java b/src/org/hibernate/event/PostUpdateEvent.java new file mode 100755 index 0000000000..f4d00d2806 --- /dev/null +++ b/src/org/hibernate/event/PostUpdateEvent.java @@ -0,0 +1,51 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; + +/** + * Occurs after the datastore is updated + * + * @author Gavin King + */ +public class PostUpdateEvent extends AbstractEvent { + private Object entity; + private EntityPersister persister; + private Object[] state; + private Object[] oldState; + private Serializable id; + + public PostUpdateEvent( + Object entity, + Serializable id, + Object[] state, + Object[] oldState, + EntityPersister persister, + EventSource source + ) { + super(source); + this.entity = entity; + this.id = id; + this.state = state; + this.oldState = oldState; + this.persister = persister; + } + + public Object getEntity() { + return entity; + } + public Serializable getId() { + return id; + } + public Object[] getOldState() { + return oldState; + } + public EntityPersister getPersister() { + return persister; + } + public Object[] getState() { + return state; + } +} diff --git a/src/org/hibernate/event/PostUpdateEventListener.java b/src/org/hibernate/event/PostUpdateEventListener.java new file mode 100755 index 0000000000..638a53878a --- /dev/null +++ b/src/org/hibernate/event/PostUpdateEventListener.java @@ -0,0 +1,13 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Called after updating the datastore + * + * @author Gavin King + */ +public interface PostUpdateEventListener extends Serializable { + public void onPostUpdate(PostUpdateEvent event); +} diff --git a/src/org/hibernate/event/PreDeleteEvent.java b/src/org/hibernate/event/PreDeleteEvent.java new file mode 100755 index 0000000000..91b879b2ca --- /dev/null +++
b/src/org/hibernate/event/PreDeleteEvent.java @@ -0,0 +1,44 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; + +/** + * Occurs before deleting an item from the datastore + * + * @author Gavin King + */ +public class PreDeleteEvent { + private Object entity; + private EntityPersister persister; + private Serializable id; + private Object[] deletedState; + + public Object getEntity() { + return entity; + } + public Serializable getId() { + return id; + } + public EntityPersister getPersister() { + return persister; + } + public Object[] getDeletedState() { + return deletedState; + } + + public PreDeleteEvent( + Object entity, + Serializable id, + Object[] deletedState, + EntityPersister persister + ) { + this.entity = entity; + this.persister = persister; + this.id = id; + this.deletedState = deletedState; + } + +} diff --git a/src/org/hibernate/event/PreDeleteEventListener.java b/src/org/hibernate/event/PreDeleteEventListener.java new file mode 100755 index 0000000000..e9f73823dc --- /dev/null +++ b/src/org/hibernate/event/PreDeleteEventListener.java @@ -0,0 +1,16 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Called before deleting an item from the datastore + * + * @author Gavin King + */ +public interface PreDeleteEventListener extends Serializable { + /** + * Return true if the operation should be vetoed + */ + public boolean onPreDelete(PreDeleteEvent event); +} diff --git a/src/org/hibernate/event/PreInsertEvent.java b/src/org/hibernate/event/PreInsertEvent.java new file mode 100755 index 0000000000..0776dd2c34 --- /dev/null +++ b/src/org/hibernate/event/PreInsertEvent.java @@ -0,0 +1,50 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.engine.SessionImplementor; + +/** + * Occurs before inserting an item in the datastore + * + * @author Gavin King + */ +public class PreInsertEvent { + private Object entity; + private EntityPersister persister; + private Object[] state; + private Serializable id; + private SessionImplementor source; + + public PreInsertEvent( + Object entity, + Serializable id, + Object[] state, + EntityPersister persister, + SessionImplementor source + ) { + this.source = source; + this.entity = entity; + this.id = id; + this.state = state; + this.persister = persister; + } + + public Object getEntity() { + return entity; + } + public Serializable getId() { + return id; + } + public EntityPersister getPersister() { + return persister; + } + public Object[] getState() { + return state; + } + public SessionImplementor getSource() { + return source; + } +} diff --git a/src/org/hibernate/event/PreInsertEventListener.java b/src/org/hibernate/event/PreInsertEventListener.java new file mode 100755 index 0000000000..7cbae6c2b0 --- /dev/null +++ b/src/org/hibernate/event/PreInsertEventListener.java @@ -0,0 +1,16 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Called before inserting an item in the datastore + * + * @author Gavin King + */ +public interface PreInsertEventListener extends Serializable { + /** + * Return true if the operation should be vetoed + */ + public boolean onPreInsert(PreInsertEvent event); +} diff --git a/src/org/hibernate/event/PreLoadEvent.java b/src/org/hibernate/event/PreLoadEvent.java new file mode 100755 index 0000000000..bc625570eb --- /dev/null +++ b/src/org/hibernate/event/PreLoadEvent.java 
@@ -0,0 +1,60 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; + +/** + * Called before injecting property values into a newly + * loaded entity instance. + * + * @author Gavin King + */ +public class PreLoadEvent extends AbstractEvent { + private Object entity; + private Object[] state; + private Serializable id; + private EntityPersister persister; + + public PreLoadEvent(EventSource session) { + super(session); + } + + public Object getEntity() { + return entity; + } + + public Serializable getId() { + return id; + } + + public EntityPersister getPersister() { + return persister; + } + + public Object[] getState() { + return state; + } + + public PreLoadEvent setEntity(Object entity) { + this.entity = entity; + return this; + } + + public PreLoadEvent setId(Serializable id) { + this.id = id; + return this; + } + + public PreLoadEvent setPersister(EntityPersister persister) { + this.persister = persister; + return this; + } + + public PreLoadEvent setState(Object[] state) { + this.state = state; + return this; + } + +} diff --git a/src/org/hibernate/event/PreLoadEventListener.java b/src/org/hibernate/event/PreLoadEventListener.java new file mode 100755 index 0000000000..29fa7fcb10 --- /dev/null +++ b/src/org/hibernate/event/PreLoadEventListener.java @@ -0,0 +1,14 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Called before injecting property values into a newly + * loaded entity instance. + * + * @author Gavin King + */ +public interface PreLoadEventListener extends Serializable { + public void onPreLoad(PreLoadEvent event); +} diff --git a/src/org/hibernate/event/PreUpdateEvent.java b/src/org/hibernate/event/PreUpdateEvent.java new file mode 100755 index 0000000000..f947d05e9d --- /dev/null +++ b/src/org/hibernate/event/PreUpdateEvent.java @@ -0,0 +1,56 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.engine.SessionImplementor; + +/** + * Occurs before updating the datastore + * + * @author Gavin King + */ +public class PreUpdateEvent { + private Object entity; + private EntityPersister persister; + private Object[] state; + private Object[] oldState; + private Serializable id; + private SessionImplementor source; + + public PreUpdateEvent( + Object entity, + Serializable id, + Object[] state, + Object[] oldState, + EntityPersister persister, + SessionImplementor source + ) { + this.source = source; + this.entity = entity; + this.id = id; + this.state = state; + this.oldState = oldState; + this.persister = persister; + } + + public Object getEntity() { + return entity; + } + public Serializable getId() { + return id; + } + public Object[] getOldState() { + return oldState; + } + public EntityPersister getPersister() { + return persister; + } + public Object[] getState() { + return state; + } + public SessionImplementor getSource() { + return source; + } +} diff --git a/src/org/hibernate/event/PreUpdateEventListener.java b/src/org/hibernate/event/PreUpdateEventListener.java new file mode 100755 index 0000000000..6eaf4a5003 --- /dev/null +++ b/src/org/hibernate/event/PreUpdateEventListener.java @@ -0,0 +1,16 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +/** + * Called before updating the datastore + * + * @author Gavin King + */ +public interface PreUpdateEventListener extends Serializable { + /** + * Return true if the operation should be 
vetoed + */ + public boolean onPreUpdate(PreUpdateEvent event); +} diff --git a/src/org/hibernate/event/RefreshEvent.java b/src/org/hibernate/event/RefreshEvent.java new file mode 100644 index 0000000000..92904db9e7 --- /dev/null +++ b/src/org/hibernate/event/RefreshEvent.java @@ -0,0 +1,39 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.LockMode; + +/** + * Defines an event class for the refreshing of an object. + * + * @author Steve Ebersole + */ +public class RefreshEvent extends AbstractEvent { + + private Object object; + private LockMode lockMode = LockMode.READ; + + public RefreshEvent(Object object, EventSource source) { + super(source); + if (object == null) { + throw new IllegalArgumentException("Attempt to generate refresh event with null object"); + } + this.object = object; + } + + public RefreshEvent(Object object, LockMode lockMode, EventSource source) { + this(object, source); + if (lockMode == null) { + throw new IllegalArgumentException("Attempt to generate refresh event with null lock mode"); + } + this.lockMode = lockMode; + } + + public Object getObject() { + return object; + } + + public LockMode getLockMode() { + return lockMode; + } +} diff --git a/src/org/hibernate/event/RefreshEventListener.java b/src/org/hibernate/event/RefreshEventListener.java new file mode 100644 index 0000000000..1ec5d31c65 --- /dev/null +++ b/src/org/hibernate/event/RefreshEventListener.java @@ -0,0 +1,26 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; +import java.util.Map; + +/** + * Defines the contract for handling of refresh events generated from a session. + * + * @author Steve Ebersole + */ +public interface RefreshEventListener extends Serializable { + + /** + * Handle the given refresh event. + * + * @param event The refresh event to be handled. + * @throws HibernateException + */ + public void onRefresh(RefreshEvent event) throws HibernateException; + + public void onRefresh(RefreshEvent event, Map refreshedAlready) throws HibernateException; + +} diff --git a/src/org/hibernate/event/ReplicateEvent.java b/src/org/hibernate/event/ReplicateEvent.java new file mode 100644 index 0000000000..603e92378c --- /dev/null +++ b/src/org/hibernate/event/ReplicateEvent.java @@ -0,0 +1,62 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.ReplicationMode; + +/** + * Defines an event class for the replication of an entity. 
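The pre-event listener contracts above all share the same veto convention: returning true cancels the operation. A minimal sketch of a custom pre-insert listener, assuming a hypothetical application-level Auditable interface; such a listener would typically be registered alongside (not instead of) the default listeners, e.g. via the Hibernate 3.x Configuration.setListener("pre-insert", ...) API:

    import org.hibernate.event.PreInsertEvent;
    import org.hibernate.event.PreInsertEventListener;

    // Sketch only: vetoes inserts of entities that an application-level
    // rule marks as read-only. Auditable is a hypothetical interface.
    public class VetoingPreInsertListener implements PreInsertEventListener {

        /** Hypothetical application-level marker for this example. */
        public interface Auditable {
            boolean isReadOnly();
        }

        public boolean onPreInsert(PreInsertEvent event) {
            Object entity = event.getEntity();
            if ( entity instanceof Auditable && ( (Auditable) entity ).isReadOnly() ) {
                return true;  // veto: the insert is not performed
            }
            return false;     // allow the insert to proceed
        }
    }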
+ * + * @author Steve Ebersole + */ +public class ReplicateEvent extends AbstractEvent { + + private Object object; + private ReplicationMode replicationMode; + private String entityName; + + public ReplicateEvent(Object object, ReplicationMode replicationMode, EventSource source) { + this(null, object, replicationMode, source); + } + + public ReplicateEvent(String entityName, Object object, ReplicationMode replicationMode, EventSource source) { + super(source); + this.entityName = entityName; + + if ( object == null ) { + throw new IllegalArgumentException( + "attempt to create replication event with null entity" + ); + } + if ( replicationMode == null ) { + throw new IllegalArgumentException( + "attempt to create replication event with null replication mode" + ); + } + + this.object = object; + this.replicationMode = replicationMode; + } + + public Object getObject() { + return object; + } + + public void setObject(Object object) { + this.object = object; + } + + public ReplicationMode getReplicationMode() { + return replicationMode; + } + + public void setReplicationMode(ReplicationMode replicationMode) { + this.replicationMode = replicationMode; + } + + public String getEntityName() { + return entityName; + } + public void setEntityName(String entityName) { + this.entityName = entityName; + } +} diff --git a/src/org/hibernate/event/ReplicateEventListener.java b/src/org/hibernate/event/ReplicateEventListener.java new file mode 100644 index 0000000000..0d4d5542dc --- /dev/null +++ b/src/org/hibernate/event/ReplicateEventListener.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate.event; + +import org.hibernate.HibernateException; + +import java.io.Serializable; + +/** + * Defines the contract for handling of replicate events generated from a session. + * + * @author Steve Ebersole + */ +public interface ReplicateEventListener extends Serializable { + + /** Handle the given replicate event. + * + * @param event The replicate event to be handled.
+ * @throws HibernateException + */ + public void onReplicate(ReplicateEvent event) throws HibernateException; + +} diff --git a/src/org/hibernate/event/SaveOrUpdateEvent.java b/src/org/hibernate/event/SaveOrUpdateEvent.java new file mode 100755 index 0000000000..237e9e86d5 --- /dev/null +++ b/src/org/hibernate/event/SaveOrUpdateEvent.java @@ -0,0 +1,94 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.engine.EntityEntry; + +/** + * An event class for saveOrUpdate() + * + * @author Steve Ebersole + */ +public class SaveOrUpdateEvent extends AbstractEvent { + + private Object object; + private Serializable requestedId; + private String entityName; + private Object entity; + private EntityEntry entry; + private Serializable resultId; + + public SaveOrUpdateEvent(String entityName, Object original, EventSource source) { + this(original, source); + this.entityName = entityName; + } + + public SaveOrUpdateEvent(String entityName, Object original, Serializable id, EventSource source) { + this(entityName, original, source); + this.requestedId = id; + if ( requestedId == null ) { + throw new IllegalArgumentException( + "attempt to create saveOrUpdate event with null identifier" + ); + } + } + + public SaveOrUpdateEvent(Object object, EventSource source) { + super(source); + if ( object == null ) { + throw new IllegalArgumentException( + "attempt to create saveOrUpdate event with null entity" + ); + } + this.object = object; + } + + public Object getObject() { + return object; + } + + public void setObject(Object object) { + this.object = object; + } + + public Serializable getRequestedId() { + return requestedId; + } + + public void setRequestedId(Serializable requestedId) { + this.requestedId = requestedId; + } + + public String getEntityName() { + return entityName; + } + + public void setEntityName(String entityName) { + this.entityName = entityName; + } + + public Object getEntity() { + return entity; + } + + public void setEntity(Object entity) { + this.entity = entity; + } + + public EntityEntry getEntry() { + return entry; + } + + public void setEntry(EntityEntry entry) { + this.entry = entry; + } + + public Serializable getResultId() { + return resultId; + } + + public void setResultId(Serializable resultId) { + this.resultId = resultId; + } +} diff --git a/src/org/hibernate/event/SaveOrUpdateEventListener.java b/src/org/hibernate/event/SaveOrUpdateEventListener.java new file mode 100755 index 0000000000..38e93ff5bd --- /dev/null +++ b/src/org/hibernate/event/SaveOrUpdateEventListener.java @@ -0,0 +1,23 @@ +//$Id$ +package org.hibernate.event; + +import java.io.Serializable; + +import org.hibernate.HibernateException; + +/** + * Defines the contract for handling of update events generated from a session. + * + * @author Steve Ebersole + */ +public interface SaveOrUpdateEventListener extends Serializable { + + /** + * Handle the given update event. + * + * @param event The update event to be handled. 
+ * @throws HibernateException + */ + public void onSaveOrUpdate(SaveOrUpdateEvent event) throws HibernateException; + +} diff --git a/src/org/hibernate/event/def/AbstractFlushingEventListener.java b/src/org/hibernate/event/def/AbstractFlushingEventListener.java new file mode 100644 index 0000000000..41166c3a87 --- /dev/null +++ b/src/org/hibernate/event/def/AbstractFlushingEventListener.java @@ -0,0 +1,356 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.action.CollectionRecreateAction; +import org.hibernate.action.CollectionRemoveAction; +import org.hibernate.action.CollectionUpdateAction; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.ActionQueue; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.engine.CollectionEntry; +import org.hibernate.engine.CollectionKey; +import org.hibernate.engine.Collections; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.PersistenceContext; +import org.hibernate.engine.Status; +import org.hibernate.event.EventSource; +import org.hibernate.event.FlushEntityEvent; +import org.hibernate.event.FlushEntityEventListener; +import org.hibernate.event.FlushEvent; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.Printer; +import org.hibernate.util.IdentityMap; +import org.hibernate.util.LazyIterator; + +/** + * A convenience base class for listeners whose functionality results in flushing. + * + * @author Steve Ebersole + */ +public abstract class AbstractFlushingEventListener implements Serializable { + + private static final Log log = LogFactory.getLog(AbstractFlushingEventListener.class); + + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // Pre-flushing section + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Coordinates the processing necessary to get things ready for executions + * as db calls by prepping the session caches and moving the appropriate + * entities and collections to their respective execution queues. + * + * @param event The flush event. + * @throws HibernateException Error flushing caches to execution queues.
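As a sketch of what these two flush phases (see flushEverythingToExecutions() and performExecutions() below) look like from calling code; Item is an assumed mapped entity with a name property, and the SessionFactory wiring is omitted:

    import org.hibernate.Session;
    import org.hibernate.SessionFactory;
    import org.hibernate.Transaction;

    // Sketch: flushEverythingToExecutions() dirty-checks the persistence
    // context and queues an update action; performExecutions() then issues
    // the queued SQL. Item is an assumed mapped entity.
    public class FlushSketch {
        public static void rename(SessionFactory factory, Long itemId) {
            Session session = factory.openSession();
            Transaction tx = session.beginTransaction();
            try {
                Item item = (Item) session.get( Item.class, itemId );
                item.setName( "renamed" );  // dirty only in the persistence context
                session.flush();            // queue actions, then run the SQL UPDATE
                tx.commit();                // commit flushes again if anything is pending
            }
            finally {
                session.close();
            }
        }
    }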
+ */ + protected void flushEverythingToExecutions(FlushEvent event) throws HibernateException { + + log.trace("flushing session"); + + EventSource session = event.getSession(); + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + session.getInterceptor().preFlush( new LazyIterator( persistenceContext.getEntitiesByKey() ) ); + + prepareEntityFlushes(session); + // we could move this inside if we wanted to + // tolerate collection initializations during + // collection dirty checking: + prepareCollectionFlushes(session); + // now, any collections that are initialized + // inside this block do not get updated - they + // are ignored until the next flush + + persistenceContext.setFlushing(true); + try { + flushEntities(event); + flushCollections(session); + } + finally { + persistenceContext.setFlushing(false); + } + + //some statistics + if ( log.isDebugEnabled() ) { + log.debug( "Flushed: " + + session.getActionQueue().numberOfInsertions() + " insertions, " + + session.getActionQueue().numberOfUpdates() + " updates, " + + session.getActionQueue().numberOfDeletions() + " deletions to " + + persistenceContext.getEntityEntries().size() + " objects" + ); + log.debug( "Flushed: " + + session.getActionQueue().numberOfCollectionCreations() + " (re)creations, " + + session.getActionQueue().numberOfCollectionUpdates() + " updates, " + + session.getActionQueue().numberOfCollectionRemovals() + " removals to " + + persistenceContext.getCollectionEntries().size() + " collections" + ); + new Printer( session.getFactory() ).toString( + persistenceContext.getEntitiesByKey().values().iterator(), + session.getEntityMode() + ); + } + } + + /** + * process cascade save/update at the start of a flush to discover + * any newly referenced entity that must be passed to saveOrUpdate(), + * and also apply orphan delete + */ + private void prepareEntityFlushes(EventSource session) throws HibernateException { + + log.debug("processing flush-time cascades"); + + final Map.Entry[] list = IdentityMap.concurrentEntries( session.getPersistenceContext().getEntityEntries() ); + //safe from concurrent modification because of how entryList() is implemented on IdentityMap + final int size = list.length; + final Object anything = getAnything(); + for ( int i=0; i<size; i++ ) { ... + + /** + * Execute all SQL and second-level cache updates, in a special order so that foreign-key constraints cannot be violated: + * <ol> + * <li> Inserts, in the order they were performed </li> + * <li> Updates </li> + * <li> Deletion of collection elements </li> + * <li> Insertion of collection elements </li> + * <li> Deletes, in the order they were performed </li> + * </ol> + */ + protected void performExecutions(EventSource session) throws HibernateException { + + log.trace("executing flush"); + + try { + session.getJDBCContext().getConnectionManager().flushBeginning(); + // we need to lock the collection caches before + // executing entity inserts/updates in order to + // account for bidi associations + session.getActionQueue().prepareActions(); + session.getActionQueue().executeActions(); + } + catch (HibernateException he) { + log.error("Could not synchronize database state with session", he); + throw he; + } + finally { + session.getJDBCContext().getConnectionManager().flushEnding(); + } + } + + + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // Post-flushing section + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * 1. Recreate the collection key -> collection map + * 2. rebuild the collection entries + * 3. call Interceptor.postFlush() + */ + protected void postFlush(SessionImplementor session) throws HibernateException { + + log.trace( "post flush" ); + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + persistenceContext.getCollectionsByKey().clear(); + persistenceContext.getBatchFetchQueue() + .clearSubselects(); //the database has changed now, so the subselect results need to be invalidated + + Iterator iter = persistenceContext.getCollectionEntries().entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry me = (Map.Entry) iter.next(); + CollectionEntry collectionEntry = (CollectionEntry) me.getValue(); + PersistentCollection persistentCollection = (PersistentCollection) me.getKey(); + collectionEntry.postFlush(persistentCollection); + if ( collectionEntry.getLoadedPersister() == null ) { + //if the collection is dereferenced, remove from the session cache + //iter.remove(); //does not work, since the entrySet is not backed by the set + persistenceContext.getCollectionEntries() + .remove(persistentCollection); + } + else { + //otherwise recreate the mapping between the collection and its key + CollectionKey collectionKey = new CollectionKey( + collectionEntry.getLoadedPersister(), + collectionEntry.getLoadedKey(), + session.getEntityMode() + ); + persistenceContext.getCollectionsByKey() + .put(collectionKey, persistentCollection); + } + } + + session.getInterceptor().postFlush( new LazyIterator( persistenceContext.getEntitiesByKey() ) ); + + } + +} diff --git a/src/org/hibernate/event/def/AbstractLockUpgradeEventListener.java b/src/org/hibernate/event/def/AbstractLockUpgradeEventListener.java new file mode 100644 index 0000000000..8f63db5935 --- /dev/null +++ b/src/org/hibernate/event/def/AbstractLockUpgradeEventListener.java @@ -0,0 +1,103 @@ +//$Id$ +package org.hibernate.event.def; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.ObjectDeletedException; +import org.hibernate.cache.CacheConcurrencyStrategy; +import org.hibernate.cache.CacheKey; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.Status; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; + +/** + * A convenience base class for listeners that respond to requests to perform a + * pessimistic lock upgrade on an entity.
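The performExecutions() ordering documented earlier (inserts always before deletes, and so on) means the SQL order at flush time is independent of the order of the application's calls. A sketch, with Parent as an assumed mapped entity:

    import org.hibernate.Session;

    // Sketch: although the delete is requested first, the queued insert is
    // executed first at flush time, per the fixed action-queue ordering
    // intended to protect foreign-key constraints.
    public class OrderingSketch {
        public static void swap(Session session, Parent obsolete, Parent replacement) {
            session.delete( obsolete );    // queued deletion
            session.save( replacement );   // queued insertion
            session.flush();               // SQL: INSERT replacement, then DELETE obsolete
        }
    }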
+ * + * @author Gavin King + */ +public class AbstractLockUpgradeEventListener extends AbstractReassociateEventListener { + + private static final Log log = LogFactory.getLog(AbstractLockUpgradeEventListener.class); + + /** + * Performs a pessimistic lock upgrade on a given entity, if needed. + * + * @param object The entity for which to upgrade the lock. + * @param entry The entity's EntityEntry instance. + * @param requestedLockMode The lock mode being requested for locking. + * @param source The session which is the source of the event being processed. + * @throws HibernateException + */ + protected void upgradeLock(Object object, EntityEntry entry, LockMode requestedLockMode, SessionImplementor source) + throws HibernateException { + + if ( requestedLockMode.greaterThan( entry.getLockMode() ) ) { + // The user requested a "greater" (i.e. more restrictive) form of + // pessimistic lock + + if ( entry.getStatus() != Status.MANAGED ) { + throw new ObjectDeletedException( + "attempted to lock a deleted instance", + entry.getId(), + entry.getPersister().getEntityName() + ); + } + + final EntityPersister persister = entry.getPersister(); + + if ( log.isTraceEnabled() ) + log.trace( + "locking " + + MessageHelper.infoString( persister, entry.getId(), source.getFactory() ) + + " in mode: " + + requestedLockMode + ); + + final CacheConcurrencyStrategy.SoftLock lock; + final CacheKey ck; + if ( persister.hasCache() ) { + ck = new CacheKey( + entry.getId(), + persister.getIdentifierType(), + persister.getRootEntityName(), + source.getEntityMode(), + source.getFactory() + ); + lock = persister.getCache().lock( ck, entry.getVersion() ); + } + else { + ck = null; + lock = null; + } + + try { + if ( persister.isVersioned() && requestedLockMode == LockMode.FORCE ) { + // todo : should we check the current isolation mode explicitly? + Object nextVersion = persister.forceVersionIncrement( + entry.getId(), entry.getVersion(), source + ); + entry.forceLocked( object, nextVersion ); + } + else { + persister.lock( entry.getId(), entry.getVersion(), object, requestedLockMode, source ); + } + entry.setLockMode(requestedLockMode); + } + finally { + // the database now holds a lock + the object is flushed from the cache, + // so release the soft lock + if ( persister.hasCache() ) { + persister.getCache().release(ck, lock ); + } + } + + } + } + +} diff --git a/src/org/hibernate/event/def/AbstractReassociateEventListener.java b/src/org/hibernate/event/def/AbstractReassociateEventListener.java new file mode 100644 index 0000000000..3680b08ab9 --- /dev/null +++ b/src/org/hibernate/event/def/AbstractReassociateEventListener.java @@ -0,0 +1,87 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.LockMode; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.Status; +import org.hibernate.engine.Versioning; +import org.hibernate.event.AbstractEvent; +import org.hibernate.event.EventSource; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.TypeFactory; + +/** + * A convenience base class for listeners that respond to requests to reassociate an entity + * to a session ( such as through lock() or update() ). 
+ * + * @author Gavin King + */ +public class AbstractReassociateEventListener implements Serializable { + + private static final Log log = LogFactory.getLog( AbstractReassociateEventListener.class ); + + /** + * Associates a given entity (either transient or associated with another session) to + * the given session. + * + * @param event The event triggering the re-association + * @param object The entity to be associated + * @param id The id of the entity. + * @param persister The entity's persister instance. + * + * @return An EntityEntry representing the entity within this session. + */ + protected final EntityEntry reassociate(AbstractEvent event, Object object, Serializable id, EntityPersister persister) { + + if ( log.isTraceEnabled() ) { + log.trace( + "reassociating transient instance: " + + MessageHelper.infoString( persister, id, event.getSession().getFactory() ) + ); + } + + EventSource source = event.getSession(); + EntityKey key = new EntityKey( id, persister, source.getEntityMode() ); + + source.getPersistenceContext().checkUniqueness( key, object ); + + //get a snapshot + Object[] values = persister.getPropertyValues( object, source.getEntityMode() ); + TypeFactory.deepCopy( + values, + persister.getPropertyTypes(), + persister.getPropertyUpdateability(), + values, + source + ); + Object version = Versioning.getVersion( values, persister ); + + EntityEntry newEntry = source.getPersistenceContext().addEntity( + object, + Status.MANAGED, + values, + key, + version, + LockMode.NONE, + true, + persister, + false, + true //will be ignored, using the existing Entry instead + ); + + new OnLockVisitor( source, id, object ).process( object, persister ); + + persister.afterReassociate( object, source ); + + return newEntry; + + } + +} diff --git a/src/org/hibernate/event/def/AbstractSaveEventListener.java b/src/org/hibernate/event/def/AbstractSaveEventListener.java new file mode 100644 index 0000000000..4d3f19260d --- /dev/null +++ b/src/org/hibernate/event/def/AbstractSaveEventListener.java @@ -0,0 +1,542 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.LockMode; +import org.hibernate.NonUniqueObjectException; +import org.hibernate.action.EntityIdentityInsertAction; +import org.hibernate.action.EntityInsertAction; +import org.hibernate.classic.Lifecycle; +import org.hibernate.classic.Validatable; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.ForeignKeys; +import org.hibernate.engine.Nullability; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.Status; +import org.hibernate.engine.Versioning; +import org.hibernate.event.EventSource; +import org.hibernate.id.IdentifierGenerationException; +import org.hibernate.id.IdentifierGeneratorFactory; +import org.hibernate.intercept.FieldInterceptionHelper; +import org.hibernate.intercept.FieldInterceptor; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; + +/** + * A convenience bas class for listeners responding to save events. + * + * @author Steve Ebersole. 
+ */ + public abstract class AbstractSaveEventListener extends AbstractReassociateEventListener { + + protected static final int PERSISTENT = 0; + protected static final int TRANSIENT = 1; + protected static final int DETACHED = 2; + protected static final int DELETED = 3; + + private static final Log log = LogFactory.getLog( AbstractSaveEventListener.class ); + + /** + * Prepares the save call using the given requested id. + * + * @param entity The entity to be saved. + * @param requestedId The id to which to associate the entity. + * @param entityName The name of the entity being saved. + * @param anything Generally cascade-specific information. + * @param source The session which is the source of this save event. + * + * @return The id used to save the entity. + */ + protected Serializable saveWithRequestedId( + Object entity, + Serializable requestedId, + String entityName, + Object anything, + EventSource source) { + return performSave( + entity, + requestedId, + source.getEntityPersister( entityName, entity ), + false, + anything, + source, + true + ); + } + + /** + * Prepares the save call using a newly generated id. + * + * @param entity The entity to be saved + * @param entityName The entity-name for the entity to be saved + * @param anything Generally cascade-specific information. + * @param source The session which is the source of this save event. + * @param requiresImmediateIdAccess does the event context require + * access to the identifier immediately after execution of this method (if + * not, post-insert style id generators may be postponed if we are outside + * a transaction). + * + * @return The id used to save the entity; may be null depending on the + * type of id generator used and the requiresImmediateIdAccess value + */ + protected Serializable saveWithGeneratedId( + Object entity, + String entityName, + Object anything, + EventSource source, + boolean requiresImmediateIdAccess) { + EntityPersister persister = source.getEntityPersister( entityName, entity ); + Serializable generatedId = persister.getIdentifierGenerator().generate( source, entity ); + if ( generatedId == null ) { + throw new IdentifierGenerationException( "null id generated for:" + entity.getClass() ); + } + else if ( generatedId == IdentifierGeneratorFactory.SHORT_CIRCUIT_INDICATOR ) { + return source.getIdentifier( entity ); + } + else if ( generatedId == IdentifierGeneratorFactory.POST_INSERT_INDICATOR ) { + return performSave( entity, null, persister, true, anything, source, requiresImmediateIdAccess ); + } + else { + + if ( log.isDebugEnabled() ) { + log.debug( + "generated identifier: " + + persister.getIdentifierType().toLoggableString( generatedId, source.getFactory() ) + + ", using strategy: " + + persister.getIdentifierGenerator().getClass().getName() + //TODO: define toString()s for generators + ); + } + + return performSave( entity, generatedId, persister, false, anything, source, true ); + } + } + + /** + * Prepares the save call by checking the session caches for a pre-existing + * entity and performing any lifecycle callbacks. + * + * @param entity The entity to be saved. + * @param id The id by which to save the entity. + * @param persister The entity's persister instance. + * @param useIdentityColumn Is an identity column being used? + * @param anything Generally cascade-specific information. + * @param source The session from which the event originated.
+ * @param requiresImmediateIdAccess does the event context require + * access to the identifier immediately after execution of this method (if + * not, post-insert style id generators may be postponed if we are outside + * a transaction). + * + * @return The id used to save the entity; may be null depending on the + * type of id generator used and the requiresImmediateIdAccess value + */ + protected Serializable performSave( + Object entity, + Serializable id, + EntityPersister persister, + boolean useIdentityColumn, + Object anything, + EventSource source, + boolean requiresImmediateIdAccess) { + + if ( log.isTraceEnabled() ) { + log.trace( + "saving " + + MessageHelper.infoString( persister, id, source.getFactory() ) + ); + } + + EntityKey key; + if ( !useIdentityColumn ) { + key = new EntityKey( id, persister, source.getEntityMode() ); + Object old = source.getPersistenceContext().getEntity( key ); + if ( old != null ) { + if ( source.getPersistenceContext().getEntry( old ).getStatus() == Status.DELETED ) { + source.forceFlush( source.getPersistenceContext().getEntry( old ) ); + } + else { + throw new NonUniqueObjectException( id, persister.getEntityName() ); + } + } + persister.setIdentifier( entity, id, source.getEntityMode() ); + } + else { + key = null; + } + + if ( invokeSaveLifecycle( entity, persister, source ) ) { + return id; //EARLY EXIT + } + + return performSaveOrReplicate( + entity, + key, + persister, + useIdentityColumn, + anything, + source, + requiresImmediateIdAccess + ); + } + + protected boolean invokeSaveLifecycle(Object entity, EntityPersister persister, EventSource source) { + // Sub-insertions should occur before containing insertion so + // Try to do the callback now + if ( persister.implementsLifecycle( source.getEntityMode() ) ) { + log.debug( "calling onSave()" ); + if ( ( ( Lifecycle ) entity ).onSave( source ) ) { + log.debug( "insertion vetoed by onSave()" ); + return true; + } + } + return false; + } + + protected void validate(Object entity, EntityPersister persister, EventSource source) { + if ( persister.implementsValidatable( source.getEntityMode() ) ) { + ( ( Validatable ) entity ).validate(); + } + } + + /** + * Performs all the actual work needed to save an entity (well to get the save moved to + * the execution queue). + * + * @param entity The entity to be saved + * @param key The id to be used for saving the entity (or null, in the case of identity columns) + * @param persister The entity's persister instance. + * @param useIdentityColumn Should an identity column be used for id generation? + * @param anything Generally cascade-specific information. + * @param source The session which is the source of the current event. + * @param requiresImmediateIdAccess Is access to the identifier required immediately + * after the completion of the save? persist(), for example, does not require this... + * + * @return The id used to save the entity; may be null depending on the + * type of id generator used and the requiresImmediateIdAccess value + */ + protected Serializable performSaveOrReplicate( + Object entity, + EntityKey key, + EntityPersister persister, + boolean useIdentityColumn, + Object anything, + EventSource source, + boolean requiresImmediateIdAccess) { + + validate( entity, persister, source ); + + Serializable id = key == null ? 
null : key.getIdentifier(); + + boolean inTxn = source.getJDBCContext().isTransactionInProgress(); + boolean shouldDelayIdentityInserts = !inTxn && !requiresImmediateIdAccess; + + if ( useIdentityColumn && !shouldDelayIdentityInserts ) { + log.trace( "executing insertions" ); + source.getActionQueue().executeInserts(); + } + + // Put a placeholder in entries, so we don't recurse back and try to save() the + // same object again. QUESTION: should this be done before onSave() is called? + // likewise, should it be done before onUpdate()? + source.getPersistenceContext().addEntry( + entity, + Status.SAVING, + null, + null, + id, + null, + LockMode.WRITE, + useIdentityColumn, + persister, + false, + false + ); + + cascadeBeforeSave( source, persister, entity, anything ); + + Object[] values = persister.getPropertyValuesToInsert( entity, getMergeMap( anything ), source ); + Type[] types = persister.getPropertyTypes(); + + boolean substitute = substituteValuesIfNecessary( entity, id, values, persister, source ); + + if ( persister.hasCollections() ) { + substitute = substitute || visitCollectionsBeforeSave( entity, id, values, types, source ); + } + + if ( substitute ) { + persister.setPropertyValues( entity, values, source.getEntityMode() ); + } + + TypeFactory.deepCopy( + values, + types, + persister.getPropertyUpdateability(), + values, + source + ); + + new ForeignKeys.Nullifier( entity, false, useIdentityColumn, source ) + .nullifyTransientReferences( values, types ); + new Nullability( source ).checkNullability( values, persister, false ); + + if ( useIdentityColumn ) { + EntityIdentityInsertAction insert = new EntityIdentityInsertAction( + values, entity, persister, source, shouldDelayIdentityInserts + ); + if ( !shouldDelayIdentityInserts ) { + log.debug( "executing identity-insert immediately" ); + source.getActionQueue().execute( insert ); + id = insert.getGeneratedId(); + //now done in EntityIdentityInsertAction + //persister.setIdentifier( entity, id, source.getEntityMode() ); + key = new EntityKey( id, persister, source.getEntityMode() ); + source.getPersistenceContext().checkUniqueness( key, entity ); + //source.getBatcher().executeBatch(); //found another way to ensure that all batched joined inserts have been executed + } + else { + log.debug( "delaying identity-insert due to no transaction in progress" ); + source.getActionQueue().addAction( insert ); + key = insert.getDelayedEntityKey(); + } + } + + Object version = Versioning.getVersion( values, persister ); + source.getPersistenceContext().addEntity( + entity, + Status.MANAGED, + values, + key, + version, + LockMode.WRITE, + useIdentityColumn, + persister, + isVersionIncrementDisabled(), + false + ); + //source.getPersistenceContext().removeNonExist( new EntityKey( id, persister, source.getEntityMode() ) ); + + if ( !useIdentityColumn ) { + source.getActionQueue().addAction( + new EntityInsertAction( id, values, entity, version, persister, source ) + ); + } + + cascadeAfterSave( source, persister, entity, anything ); + + markInterceptorDirty( entity, persister, source ); + + return id; + } + + private void markInterceptorDirty(Object entity, EntityPersister persister, EventSource source) { + if ( FieldInterceptionHelper.isInstrumented( entity ) ) { + FieldInterceptor interceptor = FieldInterceptionHelper.injectFieldInterceptor( + entity, + persister.getEntityName(), + null, + source + ); + interceptor.dirty(); + } + } + + protected Map getMergeMap(Object anything) { + return null; + } + + /** + * After the save, will the
version number be incremented + * if the instance is modified? + * + * @return True if the version will be incremented on an entity change after save; + * false otherwise. + */ + protected boolean isVersionIncrementDisabled() { + return false; + } + + protected boolean visitCollectionsBeforeSave(Object entity, Serializable id, Object[] values, Type[] types, EventSource source) { + WrapVisitor visitor = new WrapVisitor( source ); + // substitutes into values by side-effect + visitor.processEntityPropertyValues( values, types ); + return visitor.isSubstitutionRequired(); + } + + /** + * Perform any property value substitution that is necessary + * (interceptor callback, version initialization...) + * + * @param entity The entity + * @param id The entity identifier + * @param values The snapshot entity state + * @param persister The entity persister + * @param source The originating session + * + * @return True if the snapshot state changed such that + * reinjection of the values into the entity is required. + */ + protected boolean substituteValuesIfNecessary( + Object entity, + Serializable id, + Object[] values, + EntityPersister persister, + SessionImplementor source) { + boolean substitute = source.getInterceptor().onSave( + entity, + id, + values, + persister.getPropertyNames(), + persister.getPropertyTypes() + ); + + //keep the existing version number in the case of replicate! + if ( persister.isVersioned() ) { + substitute = Versioning.seedVersion( + values, + persister.getVersionProperty(), + persister.getVersionType(), + source + ) || substitute; + } + return substitute; + } + + /** + * Handles the calls needed to perform pre-save cascades for the given entity. + * + * @param source The session from which the save event originated. + * @param persister The entity's persister instance. + * @param entity The entity to be saved. + * @param anything Generally cascade-specific data + */ + protected void cascadeBeforeSave( + EventSource source, + EntityPersister persister, + Object entity, + Object anything) { + + // cascade-save to many-to-one BEFORE the parent is saved + source.getPersistenceContext().incrementCascadeLevel(); + try { + new Cascade( getCascadeAction(), Cascade.BEFORE_INSERT_AFTER_DELETE, source ) + .cascade( persister, entity, anything ); + } + finally { + source.getPersistenceContext().decrementCascadeLevel(); + } + } + + /** + * Handles the calls needed to perform post-save cascades. + * + * @param source The session from which the event originated. + * @param persister The entity's persister instance. + * @param entity The entity being saved. + * @param anything Generally cascade-specific data + */ + protected void cascadeAfterSave( + EventSource source, + EntityPersister persister, + Object entity, + Object anything) { + + // cascade-save to collections AFTER the collection owner was saved + source.getPersistenceContext().incrementCascadeLevel(); + try { + new Cascade( getCascadeAction(), Cascade.AFTER_INSERT_BEFORE_DELETE, source ) + .cascade( persister, entity, anything ); + } + finally { + source.getPersistenceContext().decrementCascadeLevel(); + } + } + + protected abstract CascadingAction getCascadeAction(); + + /** + * Determine whether the entity is persistent, detached, or transient + * + * @param entity The entity to check + * @param entityName The name of the entity + * @param entry The entity's entry in the persistence context + * @param source The originating session. + * + * @return The state.
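The state classification documented just above (and implemented in getEntityState() below) is what lets a single saveOrUpdate() call do the right thing for new and detached instances alike. A sketch, with Item as an assumed mapped entity whose unsaved-value is a null id:

    import org.hibernate.Session;

    // Sketch of the three interesting outcomes of the state check.
    public class StateSketch {
        public static void demo(Session session, Item fresh, Item detached) {
            session.saveOrUpdate( fresh );    // null id -> TRANSIENT -> INSERT queued
            session.saveOrUpdate( detached ); // id set, unknown to this session -> DETACHED -> reattach/UPDATE
            session.saveOrUpdate( fresh );    // already in this session -> PERSISTENT -> no-op
        }
    }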
+ */ + protected int getEntityState( + Object entity, + String entityName, + EntityEntry entry, //pass this as an argument only to avoid a double lookup + SessionImplementor source) { + + if ( entry != null ) { // the object is persistent + + //the entity is associated with the session, so check its status + if ( entry.getStatus() != Status.DELETED ) { + // do nothing for persistent instances + if ( log.isTraceEnabled() ) { + log.trace( + "persistent instance of: " + + getLoggableName( entityName, entity ) + ); + } + return PERSISTENT; + } + else { + //ie. e.status==DELETED + if ( log.isTraceEnabled() ) { + log.trace( + "deleted instance of: " + + getLoggableName( entityName, entity ) + ); + } + return DELETED; + } + + } + else { // the object is transient or detached + + //the entity is not associated with the session, so + //try interceptor and unsaved-value + + if ( ForeignKeys.isTransient( entityName, entity, getAssumedUnsaved(), source ) ) { + if ( log.isTraceEnabled() ) { + log.trace( + "transient instance of: " + + getLoggableName( entityName, entity ) + ); + } + return TRANSIENT; + } + else { + if ( log.isTraceEnabled() ) { + log.trace( + "detached instance of: " + + getLoggableName( entityName, entity ) + ); + } + return DETACHED; + } + + } + } + + protected String getLoggableName(String entityName, Object entity) { + return entityName == null ? entity.getClass().getName() : entityName; + } + + protected Boolean getAssumedUnsaved() { + return null; + } + +} diff --git a/src/org/hibernate/event/def/AbstractVisitor.java b/src/org/hibernate/event/def/AbstractVisitor.java new file mode 100644 index 0000000000..95442bbf46 --- /dev/null +++ b/src/org/hibernate/event/def/AbstractVisitor.java @@ -0,0 +1,157 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.HibernateException; +import org.hibernate.event.EventSource; +import org.hibernate.intercept.LazyPropertyInitializer; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.AbstractComponentType; +import org.hibernate.type.CollectionType; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; + + +/** + * Abstract superclass of algorithms that walk + * a tree of property values of an entity, and + * perform specific functionality for collections, + * components and associated entities. + * + * @author Gavin King + */ +public abstract class AbstractVisitor { + + private final EventSource session; + + AbstractVisitor(EventSource session) { + this.session = session; + } + + /** + * Dispatch each property value to processValue(). + * + * @param values + * @param types + * @throws HibernateException + */ + void processValues(Object[] values, Type[] types) throws HibernateException { + for ( int i=0; i<types.length; i++ ) { ...

    + * This is a deviation from historical Hibernate (pre-3.2) behavior to + * align with the JPA spec, which states that transient entities can be + * passed to remove operation in which case cascades still need to be + * performed. + * + * @param session The session which is the source of the event + * @param entity The entity being delete processed + * @param cascadeDeleteEnabled Is cascading of deletes enabled + * @param persister The entity persister + * @param transientEntities A cache of already visited transient entities + * (to avoid infinite recursion). + */ + protected void deleteTransientEntity( + EventSource session, + Object entity, + boolean cascadeDeleteEnabled, + EntityPersister persister, + Set transientEntities) { + log.info( "handling transient entity in delete processing" ); + if ( transientEntities.contains( entity ) ) { + log.trace( "already handled transient entity; skipping" ); + return; + } + transientEntities.add( entity ); + cascadeBeforeDelete( session, persister, entity, null, transientEntities ); + cascadeAfterDelete( session, persister, entity, transientEntities ); + } + + /** + * Perform the entity deletion. Well, as with most operations, does not + * really perform it; just schedules an action/execution with the + * {@link org.hibernate.engine.ActionQueue} for execution during flush. + * + * @param session The originating session + * @param entity The entity to delete + * @param entityEntry The entity's entry in the {@link PersistenceContext} + * @param isCascadeDeleteEnabled Is delete cascading enabled? + * @param persister The entity persister. + * @param transientEntities A cache of already deleted entities. + */ + protected final void deleteEntity( + final EventSource session, + final Object entity, + final EntityEntry entityEntry, + final boolean isCascadeDeleteEnabled, + final EntityPersister persister, + final Set transientEntities) { + + if ( log.isTraceEnabled() ) { + log.trace( + "deleting " + + MessageHelper.infoString( persister, entityEntry.getId(), session.getFactory() ) + ); + } + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + final Type[] propTypes = persister.getPropertyTypes(); + final Object version = entityEntry.getVersion(); + + final Object[] currentState; + if ( entityEntry.getLoadedState() == null ) { //ie. 
the entity came in from update() + currentState = persister.getPropertyValues( entity, session.getEntityMode() ); + } + else { + currentState = entityEntry.getLoadedState(); + } + + final Object[] deletedState = createDeletedState( persister, currentState, session ); + entityEntry.setDeletedState( deletedState ); + + session.getInterceptor().onDelete( + entity, + entityEntry.getId(), + deletedState, + persister.getPropertyNames(), + propTypes + ); + + // before any callbacks, etc, so subdeletions see that this deletion happened first + persistenceContext.setEntryStatus( entityEntry, Status.DELETED ); + EntityKey key = new EntityKey( entityEntry.getId(), persister, session.getEntityMode() ); + + cascadeBeforeDelete( session, persister, entity, entityEntry, transientEntities ); + + new ForeignKeys.Nullifier( entity, true, false, session ) + .nullifyTransientReferences( entityEntry.getDeletedState(), propTypes ); + new Nullability( session ).checkNullability( entityEntry.getDeletedState(), persister, true ); + persistenceContext.getNullifiableEntityKeys().add( key ); + + // Ensures that containing deletions happen before sub-deletions + session.getActionQueue().addAction( + new EntityDeleteAction( + entityEntry.getId(), + deletedState, + version, + entity, + persister, + isCascadeDeleteEnabled, + session + ) + ); + + cascadeAfterDelete( session, persister, entity, transientEntities ); + + // the entry will be removed after the flush, and will no longer + // override the stale snapshot + // This is now handled by removeEntity() in EntityDeleteAction + //persistenceContext.removeDatabaseSnapshot(key); + } + + private Object[] createDeletedState(EntityPersister persister, Object[] currentState, EventSource session) { + Type[] propTypes = persister.getPropertyTypes(); + final Object[] deletedState = new Object[propTypes.length]; +// TypeFactory.deepCopy( currentState, propTypes, persister.getPropertyUpdateability(), deletedState, session ); + boolean[] copyability = new boolean[propTypes.length]; + java.util.Arrays.fill( copyability, true ); + TypeFactory.deepCopy( currentState, propTypes, copyability, deletedState, session ); + return deletedState; + } + + protected boolean invokeDeleteLifecycle(EventSource session, Object entity, EntityPersister persister) { + if ( persister.implementsLifecycle( session.getEntityMode() ) ) { + log.debug( "calling onDelete()" ); + if ( ( ( Lifecycle ) entity ).onDelete( session ) ) { + log.debug( "deletion vetoed by onDelete()" ); + return true; + } + } + return false; + } + + protected void cascadeBeforeDelete( + EventSource session, + EntityPersister persister, + Object entity, + EntityEntry entityEntry, + Set transientEntities) throws HibernateException { + + CacheMode cacheMode = session.getCacheMode(); + session.setCacheMode( CacheMode.GET ); + session.getPersistenceContext().incrementCascadeLevel(); + try { + // cascade-delete to collections BEFORE the collection owner is deleted + new Cascade( CascadingAction.DELETE, Cascade.AFTER_INSERT_BEFORE_DELETE, session ) + .cascade( persister, entity, transientEntities ); + } + finally { + session.getPersistenceContext().decrementCascadeLevel(); + session.setCacheMode( cacheMode ); + } + } + + protected void cascadeAfterDelete( + EventSource session, + EntityPersister persister, + Object entity, + Set transientEntities) throws HibernateException { + + CacheMode cacheMode = session.getCacheMode(); + session.setCacheMode( CacheMode.GET ); + session.getPersistenceContext().incrementCascadeLevel(); + try { + // 
cascade-delete to many-to-one AFTER the parent was deleted + new Cascade( CascadingAction.DELETE, Cascade.BEFORE_INSERT_AFTER_DELETE, session ) + .cascade( persister, entity, transientEntities ); + } + finally { + session.getPersistenceContext().decrementCascadeLevel(); + session.setCacheMode( cacheMode ); + } + } + +} diff --git a/src/org/hibernate/event/def/DefaultDirtyCheckEventListener.java b/src/org/hibernate/event/def/DefaultDirtyCheckEventListener.java new file mode 100644 index 0000000000..b27efadb22 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultDirtyCheckEventListener.java @@ -0,0 +1,41 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.HibernateException; +import org.hibernate.event.DirtyCheckEvent; +import org.hibernate.event.DirtyCheckEventListener; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Defines the default dirty-check event listener used by Hibernate for + * checking the session for dirtiness in response to generated dirty-check + * events. + * + * @author Steve Ebersole + */ +public class DefaultDirtyCheckEventListener extends AbstractFlushingEventListener implements DirtyCheckEventListener { + + private static final Log log = LogFactory.getLog(DefaultDirtyCheckEventListener.class); + + /** Handle the given dirty-check event. + * + * @param event The dirty-check event to be handled. + * @throws HibernateException + */ + public void onDirtyCheck(DirtyCheckEvent event) throws HibernateException { + + int oldSize = event.getSession().getActionQueue().numberOfCollectionRemovals(); + + try { + flushEverythingToExecutions(event); + boolean wasNeeded = event.getSession().getActionQueue().hasAnyQueuedActions(); + log.debug( wasNeeded ? "session dirty" : "session not dirty" ); + event.setDirty( wasNeeded ); + } + finally { + event.getSession().getActionQueue().clearFromFlushNeededCheck( oldSize ); + } + + } +} diff --git a/src/org/hibernate/event/def/DefaultEvictEventListener.java b/src/org/hibernate/event/def/DefaultEvictEventListener.java new file mode 100644 index 0000000000..f16757f65c --- /dev/null +++ b/src/org/hibernate/event/def/DefaultEvictEventListener.java @@ -0,0 +1,92 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.PersistenceContext; +import org.hibernate.event.EventSource; +import org.hibernate.event.EvictEvent; +import org.hibernate.event.EvictEventListener; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; + +/** + * Defines the default evict event listener used by Hibernate for evicting entities + * in response to generated evict events. In particular, this implementation will + * remove any hard references to the entity that are held by the infrastructure + * (references held by application or other persistent instances are okay) + * + * @author Steve Ebersole + */ +public class DefaultEvictEventListener implements EvictEventListener { + + private static final Log log = LogFactory.getLog(DefaultEvictEventListener.class); + + /** + * Handle the given evict event.
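From the application side, eviction detaches a single instance from the session-level cache; a usage sketch (Item again an assumed mapped entity):

    import org.hibernate.Session;

    // Sketch: after evict(), the instance is detached; the listener below
    // has removed its entry (and its collections) from the persistence
    // context, so later changes to it are no longer flushed.
    public class EvictSketch {
        public static void detach(Session session, Long itemId) {
            Item item = (Item) session.get( Item.class, itemId );
            session.evict( item );
            item.setName( "not persisted" ); // ignored by subsequent flushes
        }
    }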
+ * + * @param event The evict event to be handled. + * @throws HibernateException + */ + public void onEvict(EvictEvent event) throws HibernateException { + EventSource source = event.getSession(); + final Object object = event.getObject(); + final PersistenceContext persistenceContext = source.getPersistenceContext(); + + if ( object instanceof HibernateProxy ) { + LazyInitializer li = ( (HibernateProxy) object ).getHibernateLazyInitializer(); + Serializable id = li.getIdentifier(); + EntityPersister persister = source.getFactory().getEntityPersister( li.getEntityName() ); + if ( id == null ) { + throw new IllegalArgumentException("null identifier"); + } + EntityKey key = new EntityKey( id, persister, source.getEntityMode() ); + persistenceContext.removeProxy( key ); + if ( !li.isUninitialized() ) { + final Object entity = persistenceContext.removeEntity(key); + if ( entity != null ) { + EntityEntry e = event.getSession().getPersistenceContext().removeEntry(entity); + doEvict( entity, key, e.getPersister(), event.getSession() ); + } + } + li.setSession( null ); + } + else { + EntityEntry e = persistenceContext.removeEntry( object ); + if ( e != null ) { + EntityKey key = new EntityKey( e.getId(), e.getPersister(), source.getEntityMode() ); + persistenceContext.removeEntity( key ); + doEvict( object, key, e.getPersister(), source ); + } + + } + } + + protected void doEvict( + final Object object, + final EntityKey key, + final EntityPersister persister, + final EventSource session) throws HibernateException { + + if ( log.isTraceEnabled() ) { + log.trace( "evicting " + MessageHelper.infoString(persister) ); + } + + // remove all collections for the entity from the session-level cache + if ( persister.hasCollections() ) { + new EvictVisitor( session ).process( object, persister ); + } + + new Cascade( CascadingAction.EVICT, Cascade.AFTER_EVICT, session ) + .cascade( persister, object ); + } +} diff --git a/src/org/hibernate/event/def/DefaultFlushEntityEventListener.java b/src/org/hibernate/event/def/DefaultFlushEntityEventListener.java new file mode 100755 index 0000000000..78ce5ddf49 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultFlushEntityEventListener.java @@ -0,0 +1,528 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.StaleObjectStateException; +import org.hibernate.action.EntityUpdateAction; +import org.hibernate.action.DelayedPostInsertIdentifier; +import org.hibernate.classic.Validatable; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.Nullability; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.Status; +import org.hibernate.engine.Versioning; +import org.hibernate.event.EventSource; +import org.hibernate.event.FlushEntityEvent; +import org.hibernate.event.FlushEntityEventListener; +import org.hibernate.intercept.FieldInterceptionHelper; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.Type; +import org.hibernate.util.ArrayHelper; + +/** + * An event that occurs for each entity instance at flush time + * + * @author Gavin King + */ +public class DefaultFlushEntityEventListener implements FlushEntityEventListener { + + private static final Log log = 
LogFactory.getLog(DefaultFlushEntityEventListener.class); + + /** + * make sure user didn't mangle the id + */ + public void checkId(Object object, EntityPersister persister, Serializable id, EntityMode entityMode) + throws HibernateException { + + if ( id != null && id instanceof DelayedPostInsertIdentifier ) { + // this is a situation where the entity id is assigned by a post-insert generator + // and was saved outside the transaction forcing it to be delayed + return; + } + + if ( persister.canExtractIdOutOfEntity() ) { + + Serializable oid = persister.getIdentifier( object, entityMode ); + if (id==null) { + throw new AssertionFailure("null id in " + persister.getEntityName() + " entry (don't flush the Session after an exception occurs)"); + } + if ( !persister.getIdentifierType().isEqual(id, oid, entityMode) ) { + throw new HibernateException( + "identifier of an instance of " + + persister.getEntityName() + + " was altered from " + id + + " to " + oid + ); + } + } + + } + + private void checkNaturalId( + EntityPersister persister, + Serializable identifier, + Object[] current, + Object[] loaded, + EntityMode entityMode, + SessionImplementor session) { + if ( persister.hasNaturalIdentifier() ) { + Object[] snapshot = null; + Type[] types = persister.getPropertyTypes(); + int[] props = persister.getNaturalIdentifierProperties(); + boolean[] updateable = persister.getPropertyUpdateability(); + for ( int i=0; i + * If allowed to return nulls, then if the entity happens to be found in + * the session cache, we check the entity type for proper handling + * of entity hierarchies. + *
+ * <p/>
    + * If checkDeleted was set to true, then if the entity is found in the + * session-level cache, it's current status within the session cache + * is checked to see if it has previously been scheduled for deletion. + * + * @param event The load event + * @param keyToLoad The EntityKey representing the entity to be loaded. + * @param options The load options. + * @return The entity from the session-level cache, or null. + * @throws HibernateException Generally indicates problems applying a lock-mode. + */ + protected Object loadFromSessionCache( + final LoadEvent event, + final EntityKey keyToLoad, + final LoadEventListener.LoadType options) throws HibernateException { + + SessionImplementor session = event.getSession(); + Object old = session.getEntityUsingInterceptor( keyToLoad ); + + if ( old != null ) { + // this object was already loaded + EntityEntry oldEntry = session.getPersistenceContext().getEntry( old ); + if ( options.isCheckDeleted() ) { + Status status = oldEntry.getStatus(); + if ( status == Status.DELETED || status == Status.GONE ) { + return REMOVED_ENTITY_MARKER; + } + } + if ( options.isAllowNulls() ) { + EntityPersister persister = event.getSession().getFactory().getEntityPersister( event.getEntityClassName() ); + if ( ! persister.isInstance( old, event.getSession().getEntityMode() ) ) { + return INCONSISTENT_RTN_CLASS_MARKER; + } + } + upgradeLock( old, oldEntry, event.getLockMode(), session ); + } + + return old; + } + + /** + * Attempts to load the entity from the second-level cache. + * + * @param event The load event + * @param persister The persister for the entity being requested for load + * @param options The load options. + * @return The entity from the second-level cache, or null. + * @throws HibernateException + */ + protected Object loadFromSecondLevelCache( + final LoadEvent event, + final EntityPersister persister, + final LoadEventListener.LoadType options) throws HibernateException { + + final SessionImplementor source = event.getSession(); + + final boolean useCache = persister.hasCache() && + source.getCacheMode().isGetEnabled() && + event.getLockMode().lessThan(LockMode.READ); + + if (useCache) { + + final SessionFactoryImplementor factory = source.getFactory(); + + final CacheKey ck = new CacheKey( + event.getEntityId(), + persister.getIdentifierType(), + persister.getRootEntityName(), + source.getEntityMode(), + source.getFactory() + ); + Object ce = persister.getCache() + .get( ck, source.getTimestamp() ); + + if ( factory.getStatistics().isStatisticsEnabled() ) { + if (ce==null) { + factory.getStatisticsImplementor().secondLevelCacheMiss( + persister.getCache().getRegionName() + ); + } + else { + factory.getStatisticsImplementor().secondLevelCacheHit( + persister.getCache().getRegionName() + ); + } + } + + if ( ce != null ) { + + CacheEntry entry = (CacheEntry) persister.getCacheEntryStructure() + .destructure(ce, factory); + + // Entity was found in second-level cache... 
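+ // (second-level cache hit: the destructured entry must still be
+ // reassembled into a live entity instance and registered with the
+ // persistence context, which assembleCacheEntry() handles below)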
+ return assembleCacheEntry( + entry, + event.getEntityId(), + persister, + event + ); + } + } + + return null; + } + + private Object assembleCacheEntry( + final CacheEntry entry, + final Serializable id, + final EntityPersister persister, + final LoadEvent event) throws HibernateException { + + final Object optionalObject = event.getInstanceToLoad(); + final EventSource session = event.getSession(); + final SessionFactoryImplementor factory = session.getFactory(); + + if ( log.isTraceEnabled() ) { + log.trace( + "assembling entity from second-level cache: " + + MessageHelper.infoString( persister, id, factory ) + ); + } + + EntityPersister subclassPersister = factory.getEntityPersister( entry.getSubclass() ); + Object result = optionalObject == null ? + session.instantiate( subclassPersister, id ) : optionalObject; + + // make it circular-reference safe + TwoPhaseLoad.addUninitializedCachedEntity( + new EntityKey( id, subclassPersister, session.getEntityMode() ), + result, + subclassPersister, + LockMode.NONE, + entry.areLazyPropertiesUnfetched(), + entry.getVersion(), + session + ); + + Type[] types = subclassPersister.getPropertyTypes(); + Object[] values = entry.assemble( result, id, subclassPersister, session.getInterceptor(), session ); // intializes result by side-effect + TypeFactory.deepCopy( + values, + types, + subclassPersister.getPropertyUpdateability(), + values, + session + ); + + Object version = Versioning.getVersion( values, subclassPersister ); + if ( log.isTraceEnabled() ) log.trace( "Cached Version: " + version ); + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + persistenceContext.addEntry( + result, + Status.MANAGED, + values, + null, + id, + version, + LockMode.NONE, + true, + subclassPersister, + false, + entry.areLazyPropertiesUnfetched() + ); + subclassPersister.afterInitialize( result, entry.areLazyPropertiesUnfetched(), session ); + persistenceContext.initializeNonLazyCollections(); + // upgrade the lock if necessary: + //lock(result, lockMode); + + //PostLoad is needed for EJB3 + //TODO: reuse the PostLoadEvent... + PostLoadEvent postLoadEvent = new PostLoadEvent(session).setEntity(result) + .setId(id).setPersister(persister); + PostLoadEventListener[] listeners = session.getListeners().getPostLoadEventListeners(); + for ( int i = 0; i < listeners.length; i++ ) { + listeners[i].onPostLoad(postLoadEvent); + } + + return result; + } + +} diff --git a/src/org/hibernate/event/def/DefaultLockEventListener.java b/src/org/hibernate/event/def/DefaultLockEventListener.java new file mode 100644 index 0000000000..a8bdd0b71c --- /dev/null +++ b/src/org/hibernate/event/def/DefaultLockEventListener.java @@ -0,0 +1,79 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.TransientObjectException; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.ForeignKeys; +import org.hibernate.event.EventSource; +import org.hibernate.event.LockEvent; +import org.hibernate.event.LockEventListener; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.EntityPersister; + +/** + * Defines the default lock event listeners used by hibernate to lock entities + * in response to generated lock events. 
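+ * <p/>
+ * For illustration only (an editorial sketch, not part of this patch):
+ * a lock request typically reaches this listener through the Session API.
+ * The entity instance below is hypothetical.
+ * <pre>
+ * // "item" is a hypothetical entity associated with the current session
+ * session.lock( item, LockMode.UPGRADE ); // raises a LockEvent
+ * </pre>
+ * Note that LockMode.WRITE is rejected by onLock() below with a
+ * HibernateException.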
+ * + * @author Steve Ebersole + */ +public class DefaultLockEventListener extends AbstractLockUpgradeEventListener implements LockEventListener { + + /** Handle the given lock event. + * + * @param event The lock event to be handled. + * @throws HibernateException + */ + public void onLock(LockEvent event) throws HibernateException { + + if ( event.getObject() == null ) { + throw new NullPointerException( "attempted to lock null" ); + } + + if ( event.getLockMode() == LockMode.WRITE ) { + throw new HibernateException( "Invalid lock mode for lock()" ); + } + + SessionImplementor source = event.getSession(); + + Object entity = source.getPersistenceContext().unproxyAndReassociate( event.getObject() ); + //TODO: if object was an uninitialized proxy, this is inefficient, + // resulting in two SQL selects + + EntityEntry entry = source.getPersistenceContext().getEntry(entity); + if (entry==null) { + final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); + final Serializable id = persister.getIdentifier( entity, source.getEntityMode() ); + if ( !ForeignKeys.isNotTransient( event.getEntityName(), entity, Boolean.FALSE, source ) ) { + throw new TransientObjectException( + "cannot lock an unsaved transient instance: " + + persister.getEntityName() + ); + } + + entry = reassociate(event, entity, id, persister); + + cascadeOnLock(event, persister, entity); + } + + upgradeLock( entity, entry, event.getLockMode(), source ); + } + + private void cascadeOnLock(LockEvent event, EntityPersister persister, Object entity) { + EventSource source = event.getSession(); + source.getPersistenceContext().incrementCascadeLevel(); + try { + new Cascade(CascadingAction.LOCK, Cascade.AFTER_LOCK, source) + .cascade( persister, entity, event.getLockMode() ); + } + finally { + source.getPersistenceContext().decrementCascadeLevel(); + } + } + +} diff --git a/src/org/hibernate/event/def/DefaultMergeEventListener.java b/src/org/hibernate/event/def/DefaultMergeEventListener.java new file mode 100755 index 0000000000..5817ab7831 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultMergeEventListener.java @@ -0,0 +1,438 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.ObjectDeletedException; +import org.hibernate.StaleObjectStateException; +import org.hibernate.WrongClassException; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.event.EventSource; +import org.hibernate.event.MergeEvent; +import org.hibernate.event.MergeEventListener; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.intercept.FieldInterceptionHelper; +import org.hibernate.intercept.FieldInterceptor; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; +import org.hibernate.type.ForeignKeyDirection; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.IdentityMap; + +/** + * Defines the default copy event listener used by hibernate for copying entities + * in response to generated copy events. 
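+ * <p/>
+ * For illustration only (an editorial sketch, not part of this patch):
+ * a merge typically reaches this listener through the Session API. The
+ * Item entity below is hypothetical.
+ * <pre>
+ * // "detachedItem" is a hypothetical detached instance
+ * Item managed = (Item) session.merge( detachedItem ); // raises a MergeEvent
+ * </pre>
+ * The detached instance passed in is never attached; the returned copy is
+ * the managed instance.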
+ * + * @author Gavin King + */ +public class DefaultMergeEventListener extends AbstractSaveEventListener + implements MergeEventListener { + + private static final Log log = LogFactory.getLog(DefaultMergeEventListener.class); + + protected Map getMergeMap(Object anything) { + return IdentityMap.invert( (Map) anything ); + } + + /** + * Handle the given merge event. + * + * @param event The merge event to be handled. + * @throws HibernateException + */ + public void onMerge(MergeEvent event) throws HibernateException { + onMerge( event, IdentityMap.instantiate(10) ); + } + + /** + * Handle the given merge event. + * + * @param event The merge event to be handled. + * @throws HibernateException + */ + public void onMerge(MergeEvent event, Map copyCache) throws HibernateException { + + final EventSource source = event.getSession(); + final Object original = event.getOriginal(); + + if ( original != null ) { + + final Object entity; + if ( original instanceof HibernateProxy ) { + LazyInitializer li = ( (HibernateProxy) original ).getHibernateLazyInitializer(); + if ( li.isUninitialized() ) { + log.trace("ignoring uninitialized proxy"); + event.setResult( source.load( li.getEntityName(), li.getIdentifier() ) ); + return; //EARLY EXIT! + } + else { + entity = li.getImplementation(); + } + } + else { + entity = original; + } + + if ( copyCache.containsKey(entity) ) { + log.trace("already merged"); + event.setResult(entity); + } + else { + event.setEntity( entity ); + int entityState = -1; + + // Check the persistence context for an entry relating to this + // entity to be merged... + EntityEntry entry = source.getPersistenceContext().getEntry( entity ); + if ( entry == null ) { + EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); + Serializable id = persister.getIdentifier( entity, source.getEntityMode() ); + if ( id != null ) { + EntityKey key = new EntityKey( id, persister, source.getEntityMode() ); + Object managedEntity = source.getPersistenceContext().getEntity( key ); + entry = source.getPersistenceContext().getEntry( managedEntity ); + if ( entry != null ) { + // we have specialized case of a detached entity from the + // perspective of the merge operation. Specifically, we + // have an incoming entity instance which has a corresponding + // entry in the current persistence context, but registered + // under a different entity instance + entityState = DETACHED; + } + } + } + + if ( entityState == -1 ) { + entityState = getEntityState( entity, event.getEntityName(), entry, source ); + } + + switch (entityState) { + case DETACHED: + entityIsDetached(event, copyCache); + break; + case TRANSIENT: + entityIsTransient(event, copyCache); + break; + case PERSISTENT: + entityIsPersistent(event, copyCache); + break; + default: //DELETED + throw new ObjectDeletedException( + "deleted instance passed to merge", + null, + getLoggableName( event.getEntityName(), entity ) + ); + } + } + + } + + } + + protected void entityIsPersistent(MergeEvent event, Map copyCache) { + log.trace("ignoring persistent instance"); + + //TODO: check that entry.getIdentifier().equals(requestedId) + + final Object entity = event.getEntity(); + final EventSource source = event.getSession(); + final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); + + copyCache.put(entity, entity); //before cascade! 
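+ // (registering the entity against itself in the copy cache marks it as
+ // already handled, so circular references met by the cascade below are
+ // not re-processed)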
+ + cascadeOnMerge(source, persister, entity, copyCache); + copyValues(persister, entity, entity, source, copyCache); + + event.setResult(entity); + } + + protected void entityIsTransient(MergeEvent event, Map copyCache) { + + log.trace("merging transient instance"); + + final Object entity = event.getEntity(); + final EventSource source = event.getSession(); + + final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); + final String entityName = persister.getEntityName(); + + final Serializable id = persister.hasIdentifierProperty() ? + persister.getIdentifier( entity, source.getEntityMode() ) : + null; + + final Object copy = persister.instantiate( id, source.getEntityMode() ); //TODO: should this be Session.instantiate(Persister, ...)? + copyCache.put(entity, copy); //before cascade! + + // cascade first, so that all unsaved objects get their + // copy created before we actually copy + //cascadeOnMerge(event, persister, entity, copyCache, Cascades.CASCADE_BEFORE_MERGE); + super.cascadeBeforeSave(source, persister, entity, copyCache); + copyValues(persister, entity, copy, source, copyCache, ForeignKeyDirection.FOREIGN_KEY_FROM_PARENT); + + //this bit is only *really* absolutely necessary for handling + //requestedId, but is also good if we merge multiple object + //graphs, since it helps ensure uniqueness + final Serializable requestedId = event.getRequestedId(); + if (requestedId==null) { + saveWithGeneratedId( copy, entityName, copyCache, source, false ); + } + else { + saveWithRequestedId( copy, requestedId, entityName, copyCache, source ); + } + + // cascade first, so that all unsaved objects get their + // copy created before we actually copy + super.cascadeAfterSave(source, persister, entity, copyCache); + copyValues(persister, entity, copy, source, copyCache, ForeignKeyDirection.FOREIGN_KEY_TO_PARENT); + + event.setResult(copy); + + } + + protected void entityIsDetached(MergeEvent event, Map copyCache) { + + log.trace("merging detached instance"); + + final Object entity = event.getEntity(); + final EventSource source = event.getSession(); + + final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); + final String entityName = persister.getEntityName(); + + Serializable id = event.getRequestedId(); + if ( id == null ) { + id = persister.getIdentifier( entity, source.getEntityMode() ); + } + else { + // check that entity id = requestedId + Serializable entityId = persister.getIdentifier( entity, source.getEntityMode() ); + if ( !persister.getIdentifierType().isEqual( id, entityId, source.getEntityMode(), source.getFactory() ) ) { + throw new HibernateException( "merge requested with id not matching id of passed entity" ); + } + } + + String previousFetchProfile = source.getFetchProfile(); + source.setFetchProfile("merge"); + //we must clone embedded composite identifiers, or + //we will get back the same instance that we pass in + final Serializable clonedIdentifier = (Serializable) persister.getIdentifierType() + .deepCopy( id, source.getEntityMode(), source.getFactory() ); + final Object result = source.get(entityName, clonedIdentifier); + source.setFetchProfile(previousFetchProfile); + + if ( result == null ) { + //TODO: we should throw an exception if we really *know* for sure + // that this is a detached instance, rather than just assuming + //throw new StaleObjectStateException(entityName, id); + + // we got here because we assumed that an instance + // with an assigned id was detached, when it was + // 
really persistent + entityIsTransient(event, copyCache); + } + else { + copyCache.put(entity, result); //before cascade! + + final Object target = source.getPersistenceContext().unproxy(result); + if ( target == entity ) { + throw new AssertionFailure("entity was not detached"); + } + else if ( !source.getEntityName(target).equals(entityName) ) { + throw new WrongClassException( + "class of the given object did not match class of persistent copy", + event.getRequestedId(), + entityName + ); + } + else if ( isVersionChanged( entity, source, persister, target ) ) { + if ( source.getFactory().getStatistics().isStatisticsEnabled() ) { + source.getFactory().getStatisticsImplementor() + .optimisticFailure( entityName ); + } + throw new StaleObjectStateException( entityName, id ); + } + + // cascade first, so that all unsaved objects get their + // copy created before we actually copy + cascadeOnMerge(source, persister, entity, copyCache); + copyValues(persister, entity, target, source, copyCache); + + //copyValues works by reflection, so explicitly mark the entity instance dirty + markInterceptorDirty( entity, target ); + + event.setResult(result); + } + + } + + private void markInterceptorDirty(final Object entity, final Object target) { + if ( FieldInterceptionHelper.isInstrumented( entity ) ) { + FieldInterceptor interceptor = FieldInterceptionHelper.extractFieldInterceptor( target ); + if ( interceptor != null ) { + interceptor.dirty(); + } + } + } + + private boolean isVersionChanged(Object entity, EventSource source, EntityPersister persister, Object target) { + if ( ! persister.isVersioned() ) { + return false; + } + // for merging of versioned entities, we consider the version having + // been changed only when: + // 1) the two version values are different; + // *AND* + // 2) The target actually represents database state! + // + // This second condition is a special case which allows + // an entity to be merged during the same transaction + // (though during a seperate operation) in which it was + // originally persisted/saved + boolean changed = ! persister.getVersionType().isSame( + persister.getVersion( target, source.getEntityMode() ), + persister.getVersion( entity, source.getEntityMode() ), + source.getEntityMode() + ); + + // TODO : perhaps we should additionally require that the incoming entity + // version be equivalent to the defined unsaved-value? + return changed && existsInDatabase( target, source, persister ); + } + + private boolean existsInDatabase(Object entity, EventSource source, EntityPersister persister) { + EntityEntry entry = source.getPersistenceContext().getEntry( entity ); + if ( entry == null ) { + Serializable id = persister.getIdentifier( entity, source.getEntityMode() ); + if ( id != null ) { + EntityKey key = new EntityKey( id, persister, source.getEntityMode() ); + Object managedEntity = source.getPersistenceContext().getEntity( key ); + entry = source.getPersistenceContext().getEntry( managedEntity ); + } + } + + if ( entry == null ) { + // perhaps this should be an exception since it is only ever used + // in the above method? 
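+ // (no entry and no persistence-context hit: conservatively treat the
+ // entity as not present in the database)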
+ return false; + } + else { + return entry.isExistsInDatabase(); + } + } + + protected void copyValues( + final EntityPersister persister, + final Object entity, + final Object target, + final SessionImplementor source, + final Map copyCache + ) { + + final Object[] copiedValues = TypeFactory.replace( + persister.getPropertyValues( entity, source.getEntityMode() ), + persister.getPropertyValues( target, source.getEntityMode() ), + persister.getPropertyTypes(), + source, + target, + copyCache + ); + + persister.setPropertyValues( target, copiedValues, source.getEntityMode() ); + } + + protected void copyValues( + final EntityPersister persister, + final Object entity, + final Object target, + final SessionImplementor source, + final Map copyCache, + final ForeignKeyDirection foreignKeyDirection) { + + final Object[] copiedValues; + + if ( foreignKeyDirection == ForeignKeyDirection.FOREIGN_KEY_TO_PARENT ) { + // this is the second pass through on a merge op, so here we limit the + // replacement to associations types (value types were already replaced + // during the first pass) + copiedValues = TypeFactory.replaceAssociations( + persister.getPropertyValues( entity, source.getEntityMode() ), + persister.getPropertyValues( target, source.getEntityMode() ), + persister.getPropertyTypes(), + source, + target, + copyCache, + foreignKeyDirection + ); + } + else { + copiedValues = TypeFactory.replace( + persister.getPropertyValues( entity, source.getEntityMode() ), + persister.getPropertyValues( target, source.getEntityMode() ), + persister.getPropertyTypes(), + source, + target, + copyCache, + foreignKeyDirection + ); + } + + persister.setPropertyValues( target, copiedValues, source.getEntityMode() ); + } + + /** + * Perform any cascades needed as part of this copy event. + * + * @param source The merge event being processed. + * @param persister The persister of the entity being copied. + * @param entity The entity being copied. + * @param copyCache A cache of already copied instance. 
+ */ + protected void cascadeOnMerge( + final EventSource source, + final EntityPersister persister, + final Object entity, + final Map copyCache + ) { + source.getPersistenceContext().incrementCascadeLevel(); + try { + new Cascade( getCascadeAction(), Cascade.BEFORE_MERGE, source ) + .cascade(persister, entity, copyCache); + } + finally { + source.getPersistenceContext().decrementCascadeLevel(); + } + } + + + protected CascadingAction getCascadeAction() { + return CascadingAction.MERGE; + } + + protected Boolean getAssumedUnsaved() { + return Boolean.FALSE; + } + + /** + * Cascade behavior is redefined by this subclass, disable superclass behavior + */ + protected void cascadeAfterSave(EventSource source, EntityPersister persister, Object entity, Object anything) + throws HibernateException { + } + + /** + * Cascade behavior is redefined by this subclass, disable superclass behavior + */ + protected void cascadeBeforeSave(EventSource source, EntityPersister persister, Object entity, Object anything) + throws HibernateException { + } + +} diff --git a/src/org/hibernate/event/def/DefaultPersistEventListener.java b/src/org/hibernate/event/def/DefaultPersistEventListener.java new file mode 100755 index 0000000000..f62bd4a401 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultPersistEventListener.java @@ -0,0 +1,144 @@ +// $Id$ +package org.hibernate.event.def; + +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.ObjectDeletedException; +import org.hibernate.PersistentObjectException; +import org.hibernate.engine.CascadingAction; +import org.hibernate.event.EventSource; +import org.hibernate.event.PersistEvent; +import org.hibernate.event.PersistEventListener; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; +import org.hibernate.util.IdentityMap; + +/** + * Defines the default create event listener used by hibernate for creating + * transient entities in response to generated create events. + * + * @author Gavin King + */ +public class DefaultPersistEventListener extends AbstractSaveEventListener implements PersistEventListener { + + private static final Log log = LogFactory.getLog(DefaultPersistEventListener.class); + + /** + * Handle the given create event. + * + * @param event The create event to be handled. + * @throws HibernateException + */ + public void onPersist(PersistEvent event) throws HibernateException { + onPersist( event, IdentityMap.instantiate(10) ); + } + + + /** + * Handle the given create event. + * + * @param event The create event to be handled. + * @throws HibernateException + */ + public void onPersist(PersistEvent event, Map createCache) throws HibernateException { + + final SessionImplementor source = event.getSession(); + final Object object = event.getObject(); + + final Object entity; + if (object instanceof HibernateProxy) { + LazyInitializer li = ( (HibernateProxy) object ).getHibernateLazyInitializer(); + if ( li.isUninitialized() ) { + if ( li.getSession()==source ) { + return; //NOTE EARLY EXIT! 
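+ // (an uninitialized proxy already owned by this session is
+ // necessarily persistent, so there is nothing further to do)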
+ } + else { + throw new PersistentObjectException("uninitialized proxy passed to persist()"); + } + } + entity = li.getImplementation(); + } + else { + entity = object; + } + + int entityState = getEntityState( + entity, + event.getEntityName(), + source.getPersistenceContext().getEntry(entity), + source + ); + + switch (entityState) { + case DETACHED: + throw new PersistentObjectException( + "detached entity passed to persist: " + + getLoggableName( event.getEntityName(), entity ) + ); + case PERSISTENT: + entityIsPersistent(event, createCache); + break; + case TRANSIENT: + entityIsTransient(event, createCache); + break; + default: + throw new ObjectDeletedException( + "deleted entity passed to persist", + null, + getLoggableName( event.getEntityName(), entity ) + ); + } + + } + + protected void entityIsPersistent(PersistEvent event, Map createCache) { + log.trace("ignoring persistent instance"); + final EventSource source = event.getSession(); + + //TODO: check that entry.getIdentifier().equals(requestedId) + + final Object entity = source.getPersistenceContext().unproxy( event.getObject() ); + final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); + + if ( createCache.put(entity, entity)==null ) { + //TODO: merge into one method! + cascadeBeforeSave(source, persister, entity, createCache); + cascadeAfterSave(source, persister, entity, createCache); + } + + } + + /** + * Handle the given create event. + * + * @param event The save event to be handled. + * @throws HibernateException + */ + protected void entityIsTransient(PersistEvent event, Map createCache) throws HibernateException { + + log.trace("saving transient instance"); + + final EventSource source = event.getSession(); + + final Object entity = source.getPersistenceContext().unproxy( event.getObject() ); + + if ( createCache.put(entity, entity)==null ) { + saveWithGeneratedId( entity, event.getEntityName(), createCache, source, false ); + } + + } + + protected CascadingAction getCascadeAction() { + return CascadingAction.PERSIST; + } + + protected Boolean getAssumedUnsaved() { + return Boolean.TRUE; + } + +} diff --git a/src/org/hibernate/event/def/DefaultPersistOnFlushEventListener.java b/src/org/hibernate/event/def/DefaultPersistOnFlushEventListener.java new file mode 100644 index 0000000000..fa5aa827cd --- /dev/null +++ b/src/org/hibernate/event/def/DefaultPersistOnFlushEventListener.java @@ -0,0 +1,14 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.engine.CascadingAction; + +/** + * When persist is used as the cascade action, persistOnFlush should be used + * @author Emmanuel Bernard + */ +public class DefaultPersistOnFlushEventListener extends DefaultPersistEventListener { + protected CascadingAction getCascadeAction() { + return CascadingAction.PERSIST_ON_FLUSH; + } +} diff --git a/src/org/hibernate/event/def/DefaultPostLoadEventListener.java b/src/org/hibernate/event/def/DefaultPostLoadEventListener.java new file mode 100644 index 0000000000..a136f18469 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultPostLoadEventListener.java @@ -0,0 +1,22 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.classic.Lifecycle; +import org.hibernate.event.PostLoadEvent; +import org.hibernate.event.PostLoadEventListener; + +/** + * Call Lifecycle interface if necessary + * + * @author Gavin King + */ +public class DefaultPostLoadEventListener implements PostLoadEventListener { + + public void onPostLoad(PostLoadEvent event) { + if ( 
event.getPersister().implementsLifecycle( event.getSession().getEntityMode() ) ) { + //log.debug( "calling onLoad()" ); + ( ( Lifecycle ) event.getEntity() ).onLoad( event.getSession(), event.getId() ); + } + + } +} diff --git a/src/org/hibernate/event/def/DefaultPreLoadEventListener.java b/src/org/hibernate/event/def/DefaultPreLoadEventListener.java new file mode 100755 index 0000000000..273eecfe1d --- /dev/null +++ b/src/org/hibernate/event/def/DefaultPreLoadEventListener.java @@ -0,0 +1,29 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.event.PreLoadEvent; +import org.hibernate.event.PreLoadEventListener; +import org.hibernate.persister.entity.EntityPersister; + +/** + * Called before injecting property values into a newly + * loaded entity instance. + * + * @author Gavin King + */ +public class DefaultPreLoadEventListener implements PreLoadEventListener { + + public void onPreLoad(PreLoadEvent event) { + EntityPersister persister = event.getPersister(); + event.getSession() + .getInterceptor() + .onLoad( + event.getEntity(), + event.getId(), + event.getState(), + persister.getPropertyNames(), + persister.getPropertyTypes() + ); + } + +} diff --git a/src/org/hibernate/event/def/DefaultRefreshEventListener.java b/src/org/hibernate/event/def/DefaultRefreshEventListener.java new file mode 100644 index 0000000000..f2a0cb1b14 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultRefreshEventListener.java @@ -0,0 +1,151 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.PersistentObjectException; +import org.hibernate.UnresolvableObjectException; +import org.hibernate.cache.CacheKey; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.event.EventSource; +import org.hibernate.event.RefreshEvent; +import org.hibernate.event.RefreshEventListener; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.AbstractComponentType; +import org.hibernate.type.CollectionType; +import org.hibernate.type.Type; +import org.hibernate.util.IdentityMap; + +/** + * Defines the default refresh event listener used by hibernate for refreshing entities + * in response to generated refresh events. + * + * @author Steve Ebersole + */ +public class DefaultRefreshEventListener implements RefreshEventListener { + + private static final Log log = LogFactory.getLog(DefaultRefreshEventListener.class); + + public void onRefresh(RefreshEvent event) throws HibernateException { + onRefresh( event, IdentityMap.instantiate(10) ); + } + + /** + * Handle the given refresh event. + * + * @param event The refresh event to be handled. 
+ * @throws HibernateException + */ + public void onRefresh(RefreshEvent event, Map refreshedAlready) throws HibernateException { + + final EventSource source = event.getSession(); + + if ( source.getPersistenceContext().reassociateIfUninitializedProxy( event.getObject() ) ) return; + + final Object object = source.getPersistenceContext().unproxyAndReassociate( event.getObject() ); + + if ( refreshedAlready.containsKey(object) ) { + log.trace("already refreshed"); + return; + } + + final EntityEntry e = source.getPersistenceContext().getEntry( object ); + final EntityPersister persister; + final Serializable id; + + if ( e == null ) { + persister = source.getEntityPersister(null, object); //refresh() does not pass an entityName + id = persister.getIdentifier( object, event.getSession().getEntityMode() ); + if ( log.isTraceEnabled() ) { + log.trace( + "refreshing transient " + + MessageHelper.infoString( persister, id, source.getFactory() ) + ); + } + EntityKey key = new EntityKey( id, persister, source.getEntityMode() ); + if ( source.getPersistenceContext().getEntry(key) != null ) { + throw new PersistentObjectException( + "attempted to refresh transient instance when persistent instance was already associated with the Session: " + + MessageHelper.infoString(persister, id, source.getFactory() ) + ); + } + } + else { + if ( log.isTraceEnabled() ) { + log.trace( + "refreshing " + + MessageHelper.infoString( e.getPersister(), e.getId(), source.getFactory() ) + ); + } + if ( !e.isExistsInDatabase() ) { + throw new HibernateException( "this instance does not yet exist as a row in the database" ); + } + + persister = e.getPersister(); + id = e.getId(); + } + + // cascade the refresh prior to refreshing this entity + refreshedAlready.put(object, object); + new Cascade(CascadingAction.REFRESH, Cascade.BEFORE_REFRESH, source) + .cascade( persister, object, refreshedAlready ); + + if ( e != null ) { + EntityKey key = new EntityKey( id, persister, source.getEntityMode() ); + source.getPersistenceContext().removeEntity(key); + if ( persister.hasCollections() ) new EvictVisitor( source ).process(object, persister); + } + + if ( persister.hasCache() ) { + final CacheKey ck = new CacheKey( + id, + persister.getIdentifierType(), + persister.getRootEntityName(), + source.getEntityMode(), + source.getFactory() + ); + persister.getCache().remove(ck); + } + + evictCachedCollections( persister, id, source.getFactory() ); + + String previousFetchProfile = source.getFetchProfile(); + source.setFetchProfile("refresh"); + Object result = persister.load( id, object, event.getLockMode(), source ); + source.setFetchProfile(previousFetchProfile); + + UnresolvableObjectException.throwIfNull( result, id, persister.getEntityName() ); + + } + + /** + * Evict collections from the factory-level cache + */ + private void evictCachedCollections(EntityPersister persister, Serializable id, SessionFactoryImplementor factory) + throws HibernateException { + evictCachedCollections( persister.getPropertyTypes(), id, factory ); + } + + private void evictCachedCollections(Type[] types, Serializable id, SessionFactoryImplementor factory) + throws HibernateException { + for ( int i = 0; i < types.length; i++ ) { + if ( types[i].isCollectionType() ) { + factory.evictCollection( ( (CollectionType) types[i] ).getRole(), id ); + } + else if ( types[i].isComponentType() ) { + AbstractComponentType actype = (AbstractComponentType) types[i]; + evictCachedCollections( actype.getSubtypes(), id, factory ); + } + } + } + +} diff --git 
a/src/org/hibernate/event/def/DefaultReplicateEventListener.java b/src/org/hibernate/event/def/DefaultReplicateEventListener.java new file mode 100644 index 0000000000..f0fcca54f8 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultReplicateEventListener.java @@ -0,0 +1,206 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.HibernateException; +import org.hibernate.TransientObjectException; +import org.hibernate.ReplicationMode; +import org.hibernate.LockMode; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.Status; +import org.hibernate.event.EventSource; +import org.hibernate.event.ReplicateEvent; +import org.hibernate.event.ReplicateEventListener; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.Type; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Defines the default replicate event listener used by Hibernate to replicate + * entities in response to generated replicate events. + * + * @author Steve Ebersole + */ +public class DefaultReplicateEventListener extends AbstractSaveEventListener implements ReplicateEventListener { + + private static final Log log = LogFactory.getLog( DefaultReplicateEventListener.class ); + + /** + * Handle the given replicate event. + * + * @param event The replicate event to be handled. + * + * @throws TransientObjectException An invalid attempt to replicate a transient entity. + */ + public void onReplicate(ReplicateEvent event) { + final EventSource source = event.getSession(); + if ( source.getPersistenceContext().reassociateIfUninitializedProxy( event.getObject() ) ) { + log.trace( "uninitialized proxy passed to replicate()" ); + return; + } + + Object entity = source.getPersistenceContext().unproxyAndReassociate( event.getObject() ); + + if ( source.getPersistenceContext().isEntryFor( entity ) ) { + log.trace( "ignoring persistent instance passed to replicate()" ); + //hum ... should we cascade anyway? throw an exception? fine like it is? + return; + } + + EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); + + // get the id from the object + /*if ( persister.isUnsaved(entity, source) ) { + throw new TransientObjectException("transient instance passed to replicate()"); + }*/ + Serializable id = persister.getIdentifier( entity, source.getEntityMode() ); + if ( id == null ) { + throw new TransientObjectException( "instance with null id passed to replicate()" ); + } + + final ReplicationMode replicationMode = event.getReplicationMode(); + + final Object oldVersion; + if ( replicationMode == ReplicationMode.EXCEPTION ) { + //always do an INSERT, and let it fail by constraint violation + oldVersion = null; + } + else { + //what is the version on the database? + oldVersion = persister.getCurrentVersion( id, source ); + } + + if ( oldVersion != null ) { + if ( log.isTraceEnabled() ) { + log.trace( + "found existing row for " + + MessageHelper.infoString( persister, id, source.getFactory() ) + ); + } + + /// HHH-2378 + final Object realOldVersion = persister.isVersioned() ? 
oldVersion : null; + + boolean canReplicate = replicationMode.shouldOverwriteCurrentVersion( + entity, + realOldVersion, + persister.getVersion( entity, source.getEntityMode() ), + persister.getVersionType() + ); + + if ( canReplicate ) { + //will result in a SQL UPDATE: + performReplication( entity, id, realOldVersion, persister, replicationMode, source ); + } + else { + //else do nothing (don't even reassociate object!) + log.trace( "no need to replicate" ); + } + + //TODO: would it be better to do a refresh from db? + } + else { + // no existing row - do an insert + if ( log.isTraceEnabled() ) { + log.trace( + "no existing row, replicating new instance " + + MessageHelper.infoString( persister, id, source.getFactory() ) + ); + } + + final boolean regenerate = persister.isIdentifierAssignedByInsert(); // prefer re-generation of identity! + final EntityKey key = regenerate ? + null : new EntityKey( id, persister, source.getEntityMode() ); + + performSaveOrReplicate( + entity, + key, + persister, + regenerate, + replicationMode, + source, + true + ); + + } + } + + protected boolean visitCollectionsBeforeSave(Object entity, Serializable id, Object[] values, Type[] types, EventSource source) { + //TODO: we use two visitors here, inefficient! + OnReplicateVisitor visitor = new OnReplicateVisitor( source, id, entity, false ); + visitor.processEntityPropertyValues( values, types ); + return super.visitCollectionsBeforeSave( entity, id, values, types, source ); + } + + protected boolean substituteValuesIfNecessary( + Object entity, + Serializable id, + Object[] values, + EntityPersister persister, + SessionImplementor source) { + return false; + } + + protected boolean isVersionIncrementDisabled() { + return true; + } + + private void performReplication( + Object entity, + Serializable id, + Object version, + EntityPersister persister, + ReplicationMode replicationMode, + EventSource source) throws HibernateException { + + if ( log.isTraceEnabled() ) { + log.trace( + "replicating changes to " + + MessageHelper.infoString( persister, id, source.getFactory() ) + ); + } + + new OnReplicateVisitor( source, id, entity, true ).process( entity, persister ); + + source.getPersistenceContext().addEntity( + entity, + Status.MANAGED, + null, + new EntityKey( id, persister, source.getEntityMode() ), + version, + LockMode.NONE, + true, + persister, + true, + false + ); + + cascadeAfterReplicate( entity, persister, replicationMode, source ); + } + + private void cascadeAfterReplicate( + Object entity, + EntityPersister persister, + ReplicationMode replicationMode, + EventSource source) { + source.getPersistenceContext().incrementCascadeLevel(); + try { + new Cascade( CascadingAction.REPLICATE, Cascade.AFTER_UPDATE, source ) + .cascade( persister, entity, replicationMode ); + } + finally { + source.getPersistenceContext().decrementCascadeLevel(); + } + } + + protected CascadingAction getCascadeAction() { + return CascadingAction.REPLICATE; + } +} diff --git a/src/org/hibernate/event/def/DefaultSaveEventListener.java b/src/org/hibernate/event/def/DefaultSaveEventListener.java new file mode 100644 index 0000000000..9c0dd39cd2 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultSaveEventListener.java @@ -0,0 +1,57 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.hibernate.Hibernate; +import org.hibernate.PersistentObjectException; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.Status; +import org.hibernate.event.SaveOrUpdateEvent; +import 
org.hibernate.engine.SessionImplementor; + +/** + * An event handler for save() events + * @author Gavin King + */ +public class DefaultSaveEventListener extends DefaultSaveOrUpdateEventListener { + + protected Serializable performSaveOrUpdate(SaveOrUpdateEvent event) { + // this implementation is supposed to tolerate incorrect unsaved-value + // mappings, for the purpose of backward-compatibility + EntityEntry entry = event.getSession().getPersistenceContext().getEntry( event.getEntity() ); + if ( entry!=null && entry.getStatus() != Status.DELETED ) { + return entityIsPersistent(event); + } + else { + return entityIsTransient(event); + } + } + + protected Serializable saveWithGeneratedOrRequestedId(SaveOrUpdateEvent event) { + if ( event.getRequestedId() == null ) { + return super.saveWithGeneratedOrRequestedId(event); + } + else { + return saveWithRequestedId( + event.getEntity(), + event.getRequestedId(), + event.getEntityName(), + null, + event.getSession() + ); + } + + } + + protected boolean reassociateIfUninitializedProxy(Object object, SessionImplementor source) { + if ( !Hibernate.isInitialized(object) ) { + throw new PersistentObjectException("uninitialized proxy passed to save()"); + } + else { + return false; + } + } + + +} diff --git a/src/org/hibernate/event/def/DefaultSaveOrUpdateCopyEventListener.java b/src/org/hibernate/event/def/DefaultSaveOrUpdateCopyEventListener.java new file mode 100755 index 0000000000..a2ff9d2569 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultSaveOrUpdateCopyEventListener.java @@ -0,0 +1,12 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.engine.CascadingAction; + +public class DefaultSaveOrUpdateCopyEventListener extends DefaultMergeEventListener { + + protected CascadingAction getCascadeAction() { + return CascadingAction.SAVE_UPDATE_COPY; + } + +} diff --git a/src/org/hibernate/event/def/DefaultSaveOrUpdateEventListener.java b/src/org/hibernate/event/def/DefaultSaveOrUpdateEventListener.java new file mode 100755 index 0000000000..de0f47b11e --- /dev/null +++ b/src/org/hibernate/event/def/DefaultSaveOrUpdateEventListener.java @@ -0,0 +1,368 @@ +// $Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.AssertionFailure; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.PersistentObjectException; +import org.hibernate.TransientObjectException; +import org.hibernate.classic.Lifecycle; +import org.hibernate.engine.Cascade; +import org.hibernate.engine.CascadingAction; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.Status; +import org.hibernate.event.EventSource; +import org.hibernate.event.SaveOrUpdateEvent; +import org.hibernate.event.SaveOrUpdateEventListener; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.proxy.HibernateProxy; + +/** + * Defines the default listener used by Hibernate for handling save-update + * events. 
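+ * <p/>
+ * For illustration only (an editorial sketch, not part of this patch):
+ * both of the following Session calls are routed through this listener.
+ * The entity instance below is hypothetical.
+ * <pre>
+ * session.saveOrUpdate( item );         // transient: save, detached: update
+ * session.saveOrUpdate( "Item", item ); // same, with an explicit entity name
+ * </pre>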
+ * + * @author Steve Ebersole + * @author Gavin King + */ +public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener implements SaveOrUpdateEventListener { + + private static final Log log = LogFactory.getLog( DefaultSaveOrUpdateEventListener.class ); + + /** + * Handle the given update event. + * + * @param event The update event to be handled. + */ + public void onSaveOrUpdate(SaveOrUpdateEvent event) { + final SessionImplementor source = event.getSession(); + final Object object = event.getObject(); + final Serializable requestedId = event.getRequestedId(); + + if ( requestedId != null ) { + //assign the requested id to the proxy, *before* + //reassociating the proxy + if ( object instanceof HibernateProxy ) { + ( ( HibernateProxy ) object ).getHibernateLazyInitializer().setIdentifier( requestedId ); + } + } + + if ( reassociateIfUninitializedProxy( object, source ) ) { + log.trace( "reassociated uninitialized proxy" ); + // an uninitialized proxy, noop, don't even need to + // return an id, since it is never a save() + } + else { + //initialize properties of the event: + final Object entity = source.getPersistenceContext().unproxyAndReassociate( object ); + event.setEntity( entity ); + event.setEntry( source.getPersistenceContext().getEntry( entity ) ); + //return the id in the event object + event.setResultId( performSaveOrUpdate( event ) ); + } + + } + + protected boolean reassociateIfUninitializedProxy(Object object, SessionImplementor source) { + return source.getPersistenceContext().reassociateIfUninitializedProxy( object ); + } + + protected Serializable performSaveOrUpdate(SaveOrUpdateEvent event) { + int entityState = getEntityState( + event.getEntity(), + event.getEntityName(), + event.getEntry(), + event.getSession() + ); + + switch ( entityState ) { + case DETACHED: + entityIsDetached( event ); + return null; + case PERSISTENT: + return entityIsPersistent( event ); + default: //TRANSIENT or DELETED + return entityIsTransient( event ); + } + } + + protected Serializable entityIsPersistent(SaveOrUpdateEvent event) throws HibernateException { + log.trace( "ignoring persistent instance" ); + + EntityEntry entityEntry = event.getEntry(); + if ( entityEntry == null ) { + throw new AssertionFailure( "entity was transient or detached" ); + } + else { + + if ( entityEntry.getStatus() == Status.DELETED ) { + throw new AssertionFailure( "entity was deleted" ); + } + + final SessionFactoryImplementor factory = event.getSession().getFactory(); + + Serializable requestedId = event.getRequestedId(); + + Serializable savedId; + if ( requestedId == null ) { + savedId = entityEntry.getId(); + } + else { + + final boolean isEqual = !entityEntry.getPersister().getIdentifierType() + .isEqual( requestedId, entityEntry.getId(), event.getSession().getEntityMode(), factory ); + + if ( isEqual ) { + throw new PersistentObjectException( + "object passed to save() was already persistent: " + + MessageHelper.infoString( entityEntry.getPersister(), requestedId, factory ) + ); + } + + savedId = requestedId; + + } + + if ( log.isTraceEnabled() ) { + log.trace( + "object already associated with session: " + + MessageHelper.infoString( entityEntry.getPersister(), savedId, factory ) + ); + } + + return savedId; + + } + } + + /** + * The given save-update event named a transient entity. + *
+ * <p/>
    + * Here, we will perform the save processing. + * + * @param event The save event to be handled. + * + * @return The entity's identifier after saving. + */ + protected Serializable entityIsTransient(SaveOrUpdateEvent event) { + + log.trace( "saving transient instance" ); + + final EventSource source = event.getSession(); + + EntityEntry entityEntry = event.getEntry(); + if ( entityEntry != null ) { + if ( entityEntry.getStatus() == Status.DELETED ) { + source.forceFlush( entityEntry ); + } + else { + throw new AssertionFailure( "entity was persistent" ); + } + } + + Serializable id = saveWithGeneratedOrRequestedId( event ); + + source.getPersistenceContext().reassociateProxy( event.getObject(), id ); + + return id; + } + + /** + * Save the transient instance, assigning the right identifier + * + * @param event The initiating event. + * + * @return The entity's identifier value after saving. + */ + protected Serializable saveWithGeneratedOrRequestedId(SaveOrUpdateEvent event) { + return saveWithGeneratedId( + event.getEntity(), + event.getEntityName(), + null, + event.getSession(), + true + ); + } + + /** + * The given save-update event named a detached entity. + *
+ * <p/>
    + * Here, we will perform the update processing. + * + * @param event The update event to be handled. + */ + protected void entityIsDetached(SaveOrUpdateEvent event) { + + log.trace( "updating detached instance" ); + + + if ( event.getSession().getPersistenceContext().isEntryFor( event.getEntity() ) ) { + //TODO: assertion only, could be optimized away + throw new AssertionFailure( "entity was persistent" ); + } + + Object entity = event.getEntity(); + + EntityPersister persister = event.getSession().getEntityPersister( event.getEntityName(), entity ); + + event.setRequestedId( + getUpdateId( + entity, persister, event.getRequestedId(), event.getSession().getEntityMode() + ) + ); + + performUpdate( event, entity, persister ); + + } + + /** + * Determine the id to use for updating. + * + * @param entity The entity. + * @param persister The entity persister + * @param requestedId The requested identifier + * @param entityMode The entity mode. + * + * @return The id. + * + * @throws TransientObjectException If the entity is considered transient. + */ + protected Serializable getUpdateId( + Object entity, + EntityPersister persister, + Serializable requestedId, + EntityMode entityMode) { + // use the id assigned to the instance + Serializable id = persister.getIdentifier( entity, entityMode ); + if ( id == null ) { + // assume this is a newly instantiated transient object + // which should be saved rather than updated + throw new TransientObjectException( + "The given object has a null identifier: " + + persister.getEntityName() + ); + } + else { + return id; + } + + } + + protected void performUpdate( + SaveOrUpdateEvent event, + Object entity, + EntityPersister persister) throws HibernateException { + + if ( !persister.isMutable() ) { + log.trace( "immutable instance passed to doUpdate(), locking" ); + reassociate( event, entity, event.getRequestedId(), persister ); + } + else { + + if ( log.isTraceEnabled() ) { + log.trace( + "updating " + + MessageHelper.infoString( + persister, event.getRequestedId(), event.getSession().getFactory() + ) + ); + } + + final EventSource source = event.getSession(); + + EntityKey key = new EntityKey( event.getRequestedId(), persister, source.getEntityMode() ); + + source.getPersistenceContext().checkUniqueness( key, entity ); + + if ( invokeUpdateLifecycle( entity, persister, source ) ) { + reassociate( event, event.getObject(), event.getRequestedId(), persister ); + return; + } + + // this is a transient object with existing persistent state not loaded by the session + + new OnUpdateVisitor( source, event.getRequestedId(), entity ).process( entity, persister ); + + //TODO: put this stuff back in to read snapshot from + // the second-level cache (needs some extra work) + /*Object[] cachedState = null; + + if ( persister.hasCache() ) { + CacheEntry entry = (CacheEntry) persister.getCache() + .get( event.getRequestedId(), source.getTimestamp() ); + cachedState = entry==null ? 
+ null : + entry.getState(); //TODO: half-assemble this stuff + }*/ + + source.getPersistenceContext().addEntity( + entity, + Status.MANAGED, + null, //cachedState, + key, + persister.getVersion( entity, source.getEntityMode() ), + LockMode.NONE, + true, + persister, + false, + true //assume true, since we don't really know, and it doesn't matter + ); + + persister.afterReassociate( entity, source ); + + if ( log.isTraceEnabled() ) { + log.trace( + "updating " + + MessageHelper.infoString( persister, event.getRequestedId(), source.getFactory() ) + ); + } + + cascadeOnUpdate( event, persister, entity ); + + } + } + + protected boolean invokeUpdateLifecycle(Object entity, EntityPersister persister, EventSource source) { + if ( persister.implementsLifecycle( source.getEntityMode() ) ) { + log.debug( "calling onUpdate()" ); + if ( ( ( Lifecycle ) entity ).onUpdate( source ) ) { + log.debug( "update vetoed by onUpdate()" ); + return true; + } + } + return false; + } + + /** + * Handles the calls needed to perform cascades as part of an update request + * for the given entity. + * + * @param event The event currently being processed. + * @param persister The defined persister for the entity being updated. + * @param entity The entity being updated. + */ + private void cascadeOnUpdate(SaveOrUpdateEvent event, EntityPersister persister, Object entity) { + EventSource source = event.getSession(); + source.getPersistenceContext().incrementCascadeLevel(); + try { + new Cascade( CascadingAction.SAVE_UPDATE, Cascade.AFTER_UPDATE, source ) + .cascade( persister, entity ); + } + finally { + source.getPersistenceContext().decrementCascadeLevel(); + } + } + + protected CascadingAction getCascadeAction() { + return CascadingAction.SAVE_UPDATE; + } +} diff --git a/src/org/hibernate/event/def/DefaultUpdateEventListener.java b/src/org/hibernate/event/def/DefaultUpdateEventListener.java new file mode 100644 index 0000000000..e5193e06a5 --- /dev/null +++ b/src/org/hibernate/event/def/DefaultUpdateEventListener.java @@ -0,0 +1,54 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.ObjectDeletedException; +import org.hibernate.EntityMode; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.Status; +import org.hibernate.event.SaveOrUpdateEvent; +import org.hibernate.persister.entity.EntityPersister; + +/** + * An event handler for update() events + * @author Gavin King + */ +public class DefaultUpdateEventListener extends DefaultSaveOrUpdateEventListener { + + protected Serializable performSaveOrUpdate(SaveOrUpdateEvent event) { + // this implementation is supposed to tolerate incorrect unsaved-value + // mappings, for the purpose of backward-compatibility + EntityEntry entry = event.getSession().getPersistenceContext().getEntry( event.getEntity() ); + if ( entry!=null ) { + if ( entry.getStatus()==Status.DELETED ) { + throw new ObjectDeletedException( "deleted instance passed to update()", null, event.getEntityName() ); + } + else { + return entityIsPersistent(event); + } + } + else { + entityIsDetached(event); + return null; + } + } + + /** + * If the user specified an id, assign it to the instance and use that, + * otherwise use the id already assigned to the instance + */ + protected Serializable getUpdateId(Object entity, EntityPersister persister, Serializable requestedId, EntityMode entityMode) + throws HibernateException { + + if ( requestedId==null ) { + return super.getUpdateId(entity, persister, 
requestedId, entityMode); + } + else { + persister.setIdentifier(entity, requestedId, entityMode); + return requestedId; + } + } + +} diff --git a/src/org/hibernate/event/def/DirtyCollectionSearchVisitor.java b/src/org/hibernate/event/def/DirtyCollectionSearchVisitor.java new file mode 100644 index 0000000000..abdf1dabab --- /dev/null +++ b/src/org/hibernate/event/def/DirtyCollectionSearchVisitor.java @@ -0,0 +1,66 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.event.EventSource; +import org.hibernate.type.CollectionType; + +/** + * Do we have a dirty collection here? + * 1. if it is a new application-instantiated collection, return true (does not occur anymore!) + * 2. if it is a component, recurse + * 3. if it is a wrappered collection, ask the collection entry + * + * @author Gavin King + */ +public class DirtyCollectionSearchVisitor extends AbstractVisitor { + + private boolean dirty = false; + private boolean[] propertyVersionability; + + DirtyCollectionSearchVisitor(EventSource session, boolean[] propertyVersionability) { + super(session); + this.propertyVersionability = propertyVersionability; + } + + boolean wasDirtyCollectionFound() { + return dirty; + } + + Object processCollection(Object collection, CollectionType type) + throws HibernateException { + + if (collection!=null) { + + SessionImplementor session = getSession(); + + final PersistentCollection persistentCollection; + if ( type.isArrayType() ) { + persistentCollection = session.getPersistenceContext().getCollectionHolder(collection); + // if no array holder we found an unwrappered array (this can't occur, + // because we now always call wrap() before getting to here) + // return (ah==null) ? true : searchForDirtyCollections(ah, type); + } + else { + // if not wrappered yet, its dirty (this can't occur, because + // we now always call wrap() before getting to here) + // return ( ! (obj instanceof PersistentCollection) ) ? + //true : searchForDirtyCollections( (PersistentCollection) obj, type ); + persistentCollection = (PersistentCollection) collection; + } + + if ( persistentCollection.isDirty() ) { //we need to check even if it was not initialized, because of delayed adds! + dirty=true; + return null; //NOTE: EARLY EXIT! + } + } + + return null; + } + + boolean includeEntityProperty(Object[] values, int i) { + return propertyVersionability[i] && super.includeEntityProperty(values, i); + } +} diff --git a/src/org/hibernate/event/def/EvictVisitor.java b/src/org/hibernate/event/def/EvictVisitor.java new file mode 100644 index 0000000000..ffb8f662d9 --- /dev/null +++ b/src/org/hibernate/event/def/EvictVisitor.java @@ -0,0 +1,68 @@ +//$Id$ +package org.hibernate.event.def; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.CollectionEntry; +import org.hibernate.engine.CollectionKey; +import org.hibernate.event.EventSource; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.CollectionType; + +/** + * Evict any collections referenced by the object from the session cache. + * This will NOT pick up any collections that were dereferenced, so they + * will be deleted (suboptimal but not exactly incorrect). 
+ * + * @author Gavin King + */ +public class EvictVisitor extends AbstractVisitor { + + private static final Log log = LogFactory.getLog(EvictVisitor.class); + + EvictVisitor(EventSource session) { + super(session); + } + + Object processCollection(Object collection, CollectionType type) + throws HibernateException { + + if (collection!=null) evictCollection(collection, type); + + return null; + } + public void evictCollection(Object value, CollectionType type) { + + final Object pc; + if ( type.hasHolder( getSession().getEntityMode() ) ) { + pc = getSession().getPersistenceContext().removeCollectionHolder(value); + } + else if ( value instanceof PersistentCollection ) { + pc = value; + } + else { + return; //EARLY EXIT! + } + + PersistentCollection collection = (PersistentCollection) pc; + if ( collection.unsetSession( getSession() ) ) evictCollection(collection); + } + + private void evictCollection(PersistentCollection collection) { + CollectionEntry ce = (CollectionEntry) getSession().getPersistenceContext().getCollectionEntries().remove(collection); + if ( log.isDebugEnabled() ) + log.debug( + "evicting collection: " + + MessageHelper.collectionInfoString( ce.getLoadedPersister(), ce.getLoadedKey(), getSession().getFactory() ) + ); + if ( ce.getLoadedPersister() != null && ce.getLoadedKey() != null ) { + //TODO: is this 100% correct? + getSession().getPersistenceContext().getCollectionsByKey().remove( + new CollectionKey( ce.getLoadedPersister(), ce.getLoadedKey(), getSession().getEntityMode() ) + ); + } + } + +} diff --git a/src/org/hibernate/event/def/FlushVisitor.java b/src/org/hibernate/event/def/FlushVisitor.java new file mode 100644 index 0000000000..89137405a9 --- /dev/null +++ b/src/org/hibernate/event/def/FlushVisitor.java @@ -0,0 +1,49 @@ +//$Id$ +package org.hibernate.event.def; + +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.Collections; +import org.hibernate.event.EventSource; +import org.hibernate.type.CollectionType; + +/** + * Process collections reachable from an entity. This + * visitor assumes that wrap was already performed for + * the entity. 
+ * + * @author Gavin King + */ +public class FlushVisitor extends AbstractVisitor { + + private Object owner; + + Object processCollection(Object collection, CollectionType type) + throws HibernateException { + + if (collection==CollectionType.UNFETCHED_COLLECTION) { + return null; + } + + if (collection!=null) { + final PersistentCollection coll; + if ( type.hasHolder( getSession().getEntityMode() ) ) { + coll = getSession().getPersistenceContext().getCollectionHolder(collection); + } + else { + coll = (PersistentCollection) collection; + } + + Collections.processReachableCollection( coll, type, owner, getSession() ); + } + + return null; + + } + + FlushVisitor(EventSource session, Object owner) { + super(session); + this.owner = owner; + } + +} diff --git a/src/org/hibernate/event/def/OnLockVisitor.java b/src/org/hibernate/event/def/OnLockVisitor.java new file mode 100644 index 0000000000..ad93361ca8 --- /dev/null +++ b/src/org/hibernate/event/def/OnLockVisitor.java @@ -0,0 +1,68 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.event.EventSource; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.CollectionType; + +/** + * When a transient entity is passed to lock(), we must inspect all its collections and + * 1. associate any uninitialized PersistentCollections with this session + * 2. associate any initialized PersistentCollections with this session, using the + * existing snapshot + * 3. throw an exception for each "new" collection + * + * @author Gavin King + */ +public class OnLockVisitor extends ReattachVisitor { + + public OnLockVisitor(EventSource session, Serializable key, Object owner) { + super( session, key, owner ); + } + + Object processCollection(Object collection, CollectionType type) throws HibernateException { + + SessionImplementor session = getSession(); + CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() ); + + if ( collection == null ) { + //do nothing + } + else if ( collection instanceof PersistentCollection ) { + PersistentCollection persistentCollection = ( PersistentCollection ) collection; + if ( persistentCollection.setCurrentSession( session ) ) { + if ( isOwnerUnchanged( persistentCollection, persister, extractCollectionKeyFromOwner( persister ) ) ) { + // a "detached" collection that originally belonged to the same entity + if ( persistentCollection.isDirty() ) { + throw new HibernateException( "reassociated object has dirty collection" ); + } + reattachCollection( persistentCollection, type ); + } + else { + // a "detached" collection that belonged to a different entity + throw new HibernateException( "reassociated object has dirty collection reference" ); + } + } + else { + // a collection loaded in the current session + // can not possibly be the collection belonging + // to the entity passed to update() + throw new HibernateException( "reassociated object has dirty collection reference" ); + } + } + else { + // brand new collection + //TODO: or an array!! we can't lock objects with arrays now?? 
+ throw new HibernateException( "reassociated object has dirty collection reference (or an array)" ); + } + + return null; + + } + +} diff --git a/src/org/hibernate/event/def/OnReplicateVisitor.java b/src/org/hibernate/event/def/OnReplicateVisitor.java new file mode 100644 index 0000000000..951a1abf37 --- /dev/null +++ b/src/org/hibernate/event/def/OnReplicateVisitor.java @@ -0,0 +1,66 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.event.EventSource; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.CollectionType; + +/** + * When an entity is passed to replicate(), and there is an existing row, we must + * inspect all its collections and + * 1. associate any uninitialized PersistentCollections with this session + * 2. associate any initialized PersistentCollections with this session, using the + * existing snapshot + * 3. execute a collection removal (SQL DELETE) for each null collection property + * or "new" collection + * + * @author Gavin King + */ +public class OnReplicateVisitor extends ReattachVisitor { + + private boolean isUpdate; + + OnReplicateVisitor(EventSource session, Serializable key, Object owner, boolean isUpdate) { + super( session, key, owner ); + this.isUpdate = isUpdate; + } + + Object processCollection(Object collection, CollectionType type) + throws HibernateException { + + if ( collection == CollectionType.UNFETCHED_COLLECTION ) { + return null; + } + + EventSource session = getSession(); + CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() ); + + if ( isUpdate ) { + removeCollection( persister, extractCollectionKeyFromOwner( persister ), session ); + } + if ( collection != null && ( collection instanceof PersistentCollection ) ) { + PersistentCollection wrapper = ( PersistentCollection ) collection; + wrapper.setCurrentSession( session ); + if ( wrapper.wasInitialized() ) { + session.getPersistenceContext().addNewCollection( persister, wrapper ); + } + else { + reattachCollection( wrapper, type ); + } + } + else { + // otherwise a null or brand new collection + // this will also (inefficiently) handle arrays, which + // have no snapshot, so we can't do any better + //processArrayOrNewCollection(collection, type); + } + + return null; + + } + +} diff --git a/src/org/hibernate/event/def/OnUpdateVisitor.java b/src/org/hibernate/event/def/OnUpdateVisitor.java new file mode 100644 index 0000000000..48b365f26a --- /dev/null +++ b/src/org/hibernate/event/def/OnUpdateVisitor.java @@ -0,0 +1,69 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.event.EventSource; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.CollectionType; + +/** + * When an entity is passed to update(), we must inspect all its collections and + * 1. associate any uninitialized PersistentCollections with this session + * 2. associate any initialized PersistentCollections with this session, using the + * existing snapshot + * 3. 
execute a collection removal (SQL DELETE) for each null collection property + * or "new" collection + * + * @author Gavin King + */ +public class OnUpdateVisitor extends ReattachVisitor { + + OnUpdateVisitor(EventSource session, Serializable key, Object owner) { + super( session, key, owner ); + } + + /** + * {@inheritDoc} + */ + Object processCollection(Object collection, CollectionType type) throws HibernateException { + + if ( collection == CollectionType.UNFETCHED_COLLECTION ) { + return null; + } + + EventSource session = getSession(); + CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() ); + + final Serializable collectionKey = extractCollectionKeyFromOwner( persister ); + if ( collection!=null && (collection instanceof PersistentCollection) ) { + PersistentCollection wrapper = (PersistentCollection) collection; + if ( wrapper.setCurrentSession(session) ) { + //a "detached" collection! + if ( !isOwnerUnchanged( wrapper, persister, collectionKey ) ) { + // if the collection belonged to a different entity, + // clean up the existing state of the collection + removeCollection( persister, collectionKey, session ); + } + reattachCollection(wrapper, type); + } + else { + // a collection loaded in the current session + // can not possibly be the collection belonging + // to the entity passed to update() + removeCollection(persister, collectionKey, session); + } + } + else { + // null or brand new collection + // this will also (inefficiently) handle arrays, which have + // no snapshot, so we can't do any better + removeCollection(persister, collectionKey, session); + } + + return null; + } + +} diff --git a/src/org/hibernate/event/def/ProxyVisitor.java b/src/org/hibernate/event/def/ProxyVisitor.java new file mode 100644 index 0000000000..86207fad7a --- /dev/null +++ b/src/org/hibernate/event/def/ProxyVisitor.java @@ -0,0 +1,79 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.event.EventSource; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.type.CollectionType; +import org.hibernate.type.EntityType; + +/** + * Reassociates uninitialized proxies with the session + * @author Gavin King + */ +public abstract class ProxyVisitor extends AbstractVisitor { + + + public ProxyVisitor(EventSource session) { + super(session); + } + + Object processEntity(Object value, EntityType entityType) throws HibernateException { + + if (value!=null) { + getSession().getPersistenceContext().reassociateIfUninitializedProxy(value); + // if it is an initialized proxy, let cascade + // handle it later on + } + + return null; + } + + /** + * Has the owner of the collection changed since the collection + * was snapshotted and detached? 
+ */ + protected static boolean isOwnerUnchanged( + final PersistentCollection snapshot, + final CollectionPersister persister, + final Serializable id + ) { + return isCollectionSnapshotValid(snapshot) && + persister.getRole().equals( snapshot.getRole() ) && + id.equals( snapshot.getKey() ); + } + + private static boolean isCollectionSnapshotValid(PersistentCollection snapshot) { + return snapshot != null && + snapshot.getRole() != null && + snapshot.getKey() != null; + } + + /** + * Reattach a detached (disassociated) initialized or uninitialized + * collection wrapper, using a snapshot carried with the collection + * wrapper + */ + protected void reattachCollection(PersistentCollection collection, CollectionType type) + throws HibernateException { + if ( collection.wasInitialized() ) { + CollectionPersister collectionPersister = getSession().getFactory() + .getCollectionPersister( type.getRole() ); + getSession().getPersistenceContext() + .addInitializedDetachedCollection( collectionPersister, collection ); + } + else { + if ( !isCollectionSnapshotValid(collection) ) { + throw new HibernateException( "could not reassociate uninitialized transient collection" ); + } + CollectionPersister collectionPersister = getSession().getFactory() + .getCollectionPersister( collection.getRole() ); + getSession().getPersistenceContext() + .addUninitializedDetachedCollection( collectionPersister, collection ); + } + } + +} diff --git a/src/org/hibernate/event/def/ReattachVisitor.java b/src/org/hibernate/event/def/ReattachVisitor.java new file mode 100644 index 0000000000..843eebb443 --- /dev/null +++ b/src/org/hibernate/event/def/ReattachVisitor.java @@ -0,0 +1,102 @@ +//$Id$ +package org.hibernate.event.def; + +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.action.CollectionRemoveAction; +import org.hibernate.event.EventSource; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.type.AbstractComponentType; +import org.hibernate.type.Type; + +/** + * Abstract superclass of visitors that reattach collections. + * + * @author Gavin King + */ +public abstract class ReattachVisitor extends ProxyVisitor { + + private static final Log log = LogFactory.getLog( ReattachVisitor.class ); + + private final Serializable ownerIdentifier; + private final Object owner; + + public ReattachVisitor(EventSource session, Serializable ownerIdentifier, Object owner) { + super( session ); + this.ownerIdentifier = ownerIdentifier; + this.owner = owner; + } + + /** + * Retrieve the identifier of the entity being visited. + * + * @return The entity's identifier. + */ + final Serializable getOwnerIdentifier() { + return ownerIdentifier; + } + + /** + * Retrieve the entity being visited. + * + * @return The entity. + */ + final Object getOwner() { + return owner; + } + + /** + * {@inheritDoc} + */ + Object processComponent(Object component, AbstractComponentType componentType) throws HibernateException { + Type[] types = componentType.getSubtypes(); + if ( component == null ) { + processValues( new Object[types.length], types ); + } + else { + super.processComponent( component, componentType ); + } + + return null; + } + + /** + * Schedules a collection for deletion. + * + * @param role The persister representing the collection to be removed. 
+ * @param collectionKey The collection key (differs from owner-id in the case of property-refs). + * @param source The session from which the request originated. + * @throws HibernateException + */ + void removeCollection(CollectionPersister role, Serializable collectionKey, EventSource source) throws HibernateException { + if ( log.isTraceEnabled() ) { + log.trace( + "collection dereferenced while transient " + + MessageHelper.collectionInfoString( role, ownerIdentifier, source.getFactory() ) + ); + } + source.getActionQueue().addAction( new CollectionRemoveAction( null, role, collectionKey, false, source ) ); + } + + /** + * This version is slightly different in that here we need to assume that + * the owner is not yet associated with the session, and thus we cannot + * rely on the owner's EntityEntry snapshot... + * + * @param role The persister for the collection role being processed. + * @return + */ + final Serializable extractCollectionKeyFromOwner(CollectionPersister role) { + if ( role.getCollectionType().useLHSPrimaryKey() ) { + return ownerIdentifier; + } + else { + return ( Serializable ) role.getOwnerEntityPersister().getPropertyValue( owner, role.getCollectionType().getLHSPropertyName(), getSession().getEntityMode() ); + } + + } +} diff --git a/src/org/hibernate/event/def/WrapVisitor.java b/src/org/hibernate/event/def/WrapVisitor.java new file mode 100644 index 0000000000..7b80908f72 --- /dev/null +++ b/src/org/hibernate/event/def/WrapVisitor.java @@ -0,0 +1,137 @@ +//$Id$ +package org.hibernate.event.def; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.PersistenceContext; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.event.EventSource; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.type.AbstractComponentType; +import org.hibernate.type.CollectionType; +import org.hibernate.type.Type; + +/** + * Wrap collections in a Hibernate collection + * wrapper. + * @author Gavin King + */ +public class WrapVisitor extends ProxyVisitor { + + private static final Log log = LogFactory.getLog(WrapVisitor.class); + + boolean substitute = false; + + boolean isSubstitutionRequired() { + return substitute; + } + + WrapVisitor(EventSource session) { + super(session); + } + + Object processCollection(Object collection, CollectionType collectionType) + throws HibernateException { + + if ( collection!=null && (collection instanceof PersistentCollection) ) { + + final SessionImplementor session = getSession(); + PersistentCollection coll = (PersistentCollection) collection; + if ( coll.setCurrentSession(session) ) { + reattachCollection( coll, collectionType ); + } + return null; + + } + else { + return processArrayOrNewCollection(collection, collectionType); + } + + } + + final Object processArrayOrNewCollection(Object collection, CollectionType collectionType) + throws HibernateException { + + final SessionImplementor session = getSession(); + + if (collection==null) { + //do nothing + return null; + } + else { + CollectionPersister persister = session.getFactory().getCollectionPersister( collectionType.getRole() ); + + final PersistenceContext persistenceContext = session.getPersistenceContext(); + //TODO: move into collection type, so we can use polymorphism! 
+ if ( collectionType.hasHolder( session.getEntityMode() ) ) { + + if (collection==CollectionType.UNFETCHED_COLLECTION) return null; + + PersistentCollection ah = persistenceContext.getCollectionHolder(collection); + if (ah==null) { + ah = collectionType.wrap(session, collection); + persistenceContext.addNewCollection( persister, ah ); + persistenceContext.addCollectionHolder(ah); + } + return null; + } + else { + + PersistentCollection persistentCollection = collectionType.wrap(session, collection); + persistenceContext.addNewCollection( persister, persistentCollection ); + + if ( log.isTraceEnabled() ) log.trace( "Wrapped collection in role: " + collectionType.getRole() ); + + return persistentCollection; //Force a substitution! + + } + + } + + } + + void processValue(int i, Object[] values, Type[] types) { + Object result = processValue( values[i], types[i] ); + if (result!=null) { + substitute = true; + values[i] = result; + } + } + + Object processComponent(Object component, AbstractComponentType componentType) + throws HibernateException { + + if (component!=null) { + Object[] values = componentType.getPropertyValues( component, getSession() ); + Type[] types = componentType.getSubtypes(); + boolean substituteComponent = false; + for ( int i=0; i + + +

+ This package defines a default set of event listeners that + implement the default behaviors of Hibernate. +
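+ As an illustration (a hedged sketch; the class name is hypothetical and not
+ part of this package), custom behavior is usually added by extending one of
+ these defaults and overriding individual callbacks:
+ <pre>
+ public class AuditSaveOrUpdateEventListener
+         extends org.hibernate.event.def.DefaultSaveOrUpdateEventListener {
+     // inherits the default save-or-update handling unchanged;
+     // override individual protected methods to customize
+ }
+ </pre>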

    + + diff --git a/src/org/hibernate/event/package.html b/src/org/hibernate/event/package.html new file mode 100755 index 0000000000..653fe1489c --- /dev/null +++ b/src/org/hibernate/event/package.html @@ -0,0 +1,8 @@ + + + +

    + This package defines an event framework for Hibernate. +
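+ As an illustration (a hedged sketch; the listener class shown is
+ hypothetical), custom listeners are typically wired in through
+ hibernate.cfg.xml:
+ <pre>
+ &lt;event type="save-update"&gt;
+     &lt;listener class="com.example.AuditSaveOrUpdateEventListener"/&gt;
+ &lt;/event&gt;
+ </pre>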

    + + diff --git a/src/org/hibernate/exception/CacheSQLStateConverter.java b/src/org/hibernate/exception/CacheSQLStateConverter.java new file mode 100644 index 0000000000..6bb27a9991 --- /dev/null +++ b/src/org/hibernate/exception/CacheSQLStateConverter.java @@ -0,0 +1,93 @@ +// $Id: $ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; +import java.util.HashSet; +import java.util.Set; + +/** + * A SQLExceptionConverter implementation specific to Caché SQL, + * accounting for its custom integrity constraint violation error codes. + * + * @author Jonathan Levinson + */ +public class CacheSQLStateConverter implements SQLExceptionConverter { + + private ViolatedConstraintNameExtracter extracter; + + private static final Set SQL_GRAMMAR_CATEGORIES = new HashSet(); + private static final Set DATA_CATEGORIES = new HashSet(); + private static final Set INTEGRITY_VIOLATION_CATEGORIES = new HashSet(); + private static final Set CONNECTION_CATEGORIES = new HashSet(); + + static { + SQL_GRAMMAR_CATEGORIES.add( "07" ); + SQL_GRAMMAR_CATEGORIES.add( "37" ); + SQL_GRAMMAR_CATEGORIES.add( "42" ); + SQL_GRAMMAR_CATEGORIES.add( "65" ); + SQL_GRAMMAR_CATEGORIES.add( "S0" ); + SQL_GRAMMAR_CATEGORIES.add( "20" ); + + DATA_CATEGORIES.add( "22" ); + DATA_CATEGORIES.add( "21" ); + DATA_CATEGORIES.add( "02" ); + + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 119 ) ); + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 120 ) ); + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 121 ) ); + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 122 ) ); + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 123 ) ); + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 124 ) ); + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 125 ) ); + INTEGRITY_VIOLATION_CATEGORIES.add( new Integer( 127 ) ); + + CONNECTION_CATEGORIES.add( "08" ); + } + + public CacheSQLStateConverter(ViolatedConstraintNameExtracter extracter) { + this.extracter = extracter; + } + + /** + * Convert the given SQLException into Hibernate's JDBCException hierarchy. + * + * @param sqlException The SQLException to be converted. + * @param message An optional error message. + * @param sql Optionally, the sql being performed when the exception occurred. + * @return The resulting JDBCException. + */ + public JDBCException convert(SQLException sqlException, String message, String sql) { + String sqlStateClassCode = JDBCExceptionHelper.extractSqlStateClassCode( sqlException ); + Integer errorCode = new Integer( JDBCExceptionHelper.extractErrorCode( sqlException ) ); + if ( sqlStateClassCode != null ) { + if ( SQL_GRAMMAR_CATEGORIES.contains( sqlStateClassCode ) ) { + return new SQLGrammarException( message, sqlException, sql ); + } + else if ( INTEGRITY_VIOLATION_CATEGORIES.contains( errorCode ) ) { + String constraintName = extracter.extractConstraintName( sqlException ); + return new ConstraintViolationException( message, sqlException, sql, constraintName ); + } + else if ( CONNECTION_CATEGORIES.contains( sqlStateClassCode ) ) { + return new JDBCConnectionException( message, sqlException, sql ); + } + else if ( DATA_CATEGORIES.contains( sqlStateClassCode ) ) { + return new DataException( message, sqlException, sql ); + } + } + return handledNonSpecificException( sqlException, message, sql ); + } + + /** + * Handle an exception not converted to a specific type based on the SQLState. + * + * @param sqlException The exception to be handled. 
+ * @param message An optional message + * @param sql Optionally, the sql being performed when the exception occurred. + * @return The converted exception; should never be null. + */ + protected JDBCException handledNonSpecificException(SQLException sqlException, String message, String sql) { + return new GenericJDBCException( message, sqlException, sql ); + } +} diff --git a/src/org/hibernate/exception/Configurable.java b/src/org/hibernate/exception/Configurable.java new file mode 100644 index 0000000000..6554e9c76e --- /dev/null +++ b/src/org/hibernate/exception/Configurable.java @@ -0,0 +1,24 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.HibernateException; + +import java.util.Properties; + +/** + * The Configurable interface defines the contract for SQLExceptionConverter impls that + * want to be configured prior to usage given the currently defined Hibernate properties. + * + * @author Steve Ebersole + */ +public interface Configurable { + // todo: this might really even be moved into the cfg package and used as the basis for all things which are configurable. + + /** + * Configure the component, using the given settings and properties. + * + * @param properties All defined startup properties. + * @throws HibernateException Indicates a configuration exception. + */ + public void configure(Properties properties) throws HibernateException; +} diff --git a/src/org/hibernate/exception/ConstraintViolationException.java b/src/org/hibernate/exception/ConstraintViolationException.java new file mode 100644 index 0000000000..24b746b486 --- /dev/null +++ b/src/org/hibernate/exception/ConstraintViolationException.java @@ -0,0 +1,36 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; + +/** + * Implementation of JDBCException indicating that the requested DML operation + * resulted in a violation of a defined integrity constraint. + * + * @author Steve Ebersole + */ +public class ConstraintViolationException extends JDBCException { + + private String constraintName; + + public ConstraintViolationException(String message, SQLException root, String constraintName) { + super( message, root ); + this.constraintName = constraintName; + } + + public ConstraintViolationException(String message, SQLException root, String sql, String constraintName) { + super( message, root, sql ); + this.constraintName = constraintName; + } + + /** + * Returns the name of the violated constraint, if known. + * + * @return The name of the violated constraint, or null if not known. + */ + public String getConstraintName() { + return constraintName; + } +} diff --git a/src/org/hibernate/exception/DataException.java b/src/org/hibernate/exception/DataException.java new file mode 100755 index 0000000000..dd24fb70df --- /dev/null +++ b/src/org/hibernate/exception/DataException.java @@ -0,0 +1,34 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; + +/** + * Implementation of JDBCException indicating that evaluation of the + * valid SQL statement against the given data resulted in some + * illegal operation, mismatched types or incorrect cardinality. + * + * @author Gavin King + */ +public class DataException extends JDBCException { + /** + * Constructor for JDBCException. + * + * @param root The underlying exception. + */ + public DataException(String message, SQLException root) { + super( message, root ); + } + + /** + * Constructor for JDBCException. 
+ * + * @param message Optional message. + * @param root The underlying exception. + */ + public DataException(String message, SQLException root, String sql) { + super( message, root, sql ); + } +} diff --git a/src/org/hibernate/exception/ExceptionUtils.java b/src/org/hibernate/exception/ExceptionUtils.java new file mode 100644 index 0000000000..5c6de28879 --- /dev/null +++ b/src/org/hibernate/exception/ExceptionUtils.java @@ -0,0 +1,734 @@ +/* ==================================================================== + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2002-2003 The Apache Software Foundation. All rights + * reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. The end-user documentation included with the redistribution, if + * any, must include the following acknowledgement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowledgement may appear in the software itself, + * if and wherever such third-party acknowledgements normally appear. + * + * 4. The names "The Jakarta Project", "Commons", and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Software Foundation. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . + */ +package org.hibernate.exception; + +import org.hibernate.util.ArrayHelper; + +import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.StringTokenizer; + +/** + *

    Provides utilities for manipulating and examining + * Throwable objects.

    + * + * @author Daniel Rall + * @author Dmitri Plotnikov + * @author Stephen Colebourne + * @author Gary Gregory + * @author Pete Gieser + * @version $Id$ + * @since 1.0 + */ +public final class ExceptionUtils { + + private static final String LINE_SEPARATOR = System.getProperty( "line.separator" ); + + /** + *

Used when printing stack frames to denote the start of a + * wrapped exception. + * + * Package private for accessibility by test suite.

    + */ + static final String WRAPPED_MARKER = " [wrapped] "; + + /** + *

    The names of methods commonly used to access a wrapped exception.

    + */ + private static final String[] CAUSE_METHOD_NAMES = { + "getCause", + "getNextException", + "getTargetException", + "getException", + "getSourceException", + "getRootCause", + "getCausedByException", + "getNested" + }; + + /** + *

    The Method object for JDK1.4 getCause.

    + */ + private static final Method THROWABLE_CAUSE_METHOD; + + static { + Method getCauseMethod; + try { + getCauseMethod = Throwable.class.getMethod( "getCause", null ); + } + catch ( Exception e ) { + getCauseMethod = null; + } + THROWABLE_CAUSE_METHOD = getCauseMethod; + } + + private ExceptionUtils() { + } + + //----------------------------------------------------------------------- + /** + *

    Adds to the list of method names used in the search for Throwable + * objects.

    + * + * @param methodName the methodName to add to the list, null + * and empty strings are ignored + * @since 2.0 + */ + /*public static void addCauseMethodName(String methodName) { + if ( StringHelper.isNotEmpty(methodName) ) { + List list = new ArrayList( Arrays.asList(CAUSE_METHOD_NAMES ); + list.add(methodName); + CAUSE_METHOD_NAMES = (String[]) list.toArray(new String[list.size()]); + } + }*/ + + /** + *

Introspects the Throwable to obtain the cause. + * + * The method searches for methods with specific names that return a + * Throwable object. This will pick up most wrapping exceptions, + * including those from JDK 1.4, and + * {@link org.apache.commons.lang.exception.NestableException NestableException}. + * The method names can be added to using {@link #addCauseMethodName(String)}. + * + * The default list searched for is: getCause(), getNextException(), + * getTargetException(), getException(), getSourceException(), getRootCause(), + * getCausedByException() and getNested(). + * + * In the absence of any such method, the object is inspected for a + * detail field assignable to a Throwable. + * + * If none of the above is found, returns null.
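+ *
+ * For example (an illustrative chain, assembled here only to show the lookup):
+ * <pre>
+ * SQLException root = new SQLException( "root" );
+ * SQLException outer = new SQLException( "outer" );
+ * outer.setNextException( root );
+ * Throwable cause = ExceptionUtils.getCause( outer ); // returns root
+ * </pre>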

    + * + * @param throwable the throwable to introspect for a cause, may be null + * @return the cause of the Throwable, + * null if none found or null throwable input + */ + public static Throwable getCause(Throwable throwable) { + return getCause( throwable, CAUSE_METHOD_NAMES ); + } + + /** + *

Introspects the Throwable to obtain the cause. + * + * 1. Try known exception types. + * 2. Try the supplied array of method names. + * 3. Try the field 'detail'. + * + * A null set of method names means use the default set. + * A null in the set of method names will be ignored.
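+ *
+ * For example (the accessor name shown is purely illustrative):
+ * <pre>
+ * Throwable cause = ExceptionUtils.getCause( throwable, new String[] { "getRootCause" } );
+ * </pre>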

    + * + * @param throwable the throwable to introspect for a cause, may be null + * @param methodNames the method names, null treated as default set + * @return the cause of the Throwable, + * null if none found or null throwable input + */ + public static Throwable getCause(Throwable throwable, String[] methodNames) { + if ( throwable == null ) { + return null; + } + Throwable cause = getCauseUsingWellKnownTypes( throwable ); + if ( cause == null ) { + if ( methodNames == null ) { + methodNames = CAUSE_METHOD_NAMES; + } + for ( int i = 0; i < methodNames.length; i++ ) { + String methodName = methodNames[i]; + if ( methodName != null ) { + cause = getCauseUsingMethodName( throwable, methodName ); + if ( cause != null ) { + break; + } + } + } + + if ( cause == null ) { + cause = getCauseUsingFieldName( throwable, "detail" ); + } + } + return cause; + } + + /** + *

Introspects the Throwable to obtain the root cause. + * + * This method walks through the exception chain to the last element, + * "root" of the tree, using {@link #getCause(Throwable)}, and + * returns that exception.
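+ *
+ * For example (an illustrative two-level chain):
+ * <pre>
+ * Exception root = new Exception( "root" );
+ * InvocationTargetException wrapper = new InvocationTargetException( root );
+ * Throwable cause = ExceptionUtils.getRootCause( wrapper ); // returns root
+ * </pre>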

    + * + * @param throwable the throwable to get the root cause for, may be null + * @return the root cause of the Throwable, + * null if none found or null throwable input + */ + public static Throwable getRootCause(Throwable throwable) { + Throwable cause = getCause( throwable ); + if ( cause != null ) { + throwable = cause; + while ( ( throwable = getCause( throwable ) ) != null ) { + cause = throwable; + } + } + return cause; + } + + /** + *

Finds a Throwable for known types. + * + * Uses instanceof checks to examine the exception, + * looking for well known types which could contain chained or + * wrapped exceptions.

    + * + * @param throwable the exception to examine + * @return the wrapped exception, or null if not found + */ + private static Throwable getCauseUsingWellKnownTypes(Throwable throwable) { + if ( throwable instanceof Nestable ) { + return ( ( Nestable ) throwable ).getCause(); + } + else if ( throwable instanceof SQLException ) { + return ( ( SQLException ) throwable ).getNextException(); + } + else if ( throwable instanceof InvocationTargetException ) { + return ( ( InvocationTargetException ) throwable ).getTargetException(); + } + else { + return null; + } + } + + /** + *

    Finds a Throwable by method name.

    + * + * @param throwable the exception to examine + * @param methodName the name of the method to find and invoke + * @return the wrapped exception, or null if not found + */ + private static Throwable getCauseUsingMethodName(Throwable throwable, String methodName) { + Method method = null; + try { + method = throwable.getClass().getMethod( methodName, null ); + } + catch ( NoSuchMethodException ignored ) { + } + catch ( SecurityException ignored ) { + } + + if ( method != null && Throwable.class.isAssignableFrom( method.getReturnType() ) ) { + try { + return ( Throwable ) method.invoke( throwable, ArrayHelper.EMPTY_OBJECT_ARRAY ); + } + catch ( IllegalAccessException ignored ) { + } + catch ( IllegalArgumentException ignored ) { + } + catch ( InvocationTargetException ignored ) { + } + } + return null; + } + + /** + *

    Finds a Throwable by field name.

    + * + * @param throwable the exception to examine + * @param fieldName the name of the attribute to examine + * @return the wrapped exception, or null if not found + */ + private static Throwable getCauseUsingFieldName(Throwable throwable, String fieldName) { + Field field = null; + try { + field = throwable.getClass().getField( fieldName ); + } + catch ( NoSuchFieldException ignored ) { + } + catch ( SecurityException ignored ) { + } + + if ( field != null && Throwable.class.isAssignableFrom( field.getType() ) ) { + try { + return ( Throwable ) field.get( throwable ); + } + catch ( IllegalAccessException ignored ) { + } + catch ( IllegalArgumentException ignored ) { + } + } + return null; + } + + //----------------------------------------------------------------------- + /** + *

Checks if the Throwable class has a getCause method. + * + * This is true for JDK 1.4 and above.

    + * + * @return true if Throwable is nestable + * @since 2.0 + */ + public static boolean isThrowableNested() { + return ( THROWABLE_CAUSE_METHOD != null ); + } + + /** + *

Checks whether this Throwable class can store a cause. + * + * This method does not check whether it actually does store a cause.
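+ *
+ * For example:
+ * <pre>
+ * ExceptionUtils.isNestedThrowable( new SQLException() ); // true
+ * ExceptionUtils.isNestedThrowable( null );               // false
+ * </pre>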

    + * + * @param throwable the Throwable to examine, may be null + * @return boolean true if nested otherwise false + * @since 2.0 + */ + public static boolean isNestedThrowable(Throwable throwable) { + if ( throwable == null ) { + return false; + } + + if ( throwable instanceof Nestable ) { + return true; + } + else if ( throwable instanceof SQLException ) { + return true; + } + else if ( throwable instanceof InvocationTargetException ) { + return true; + } + else if ( isThrowableNested() ) { + return true; + } + + Class cls = throwable.getClass(); + for ( int i = 0, isize = CAUSE_METHOD_NAMES.length; i < isize; i++ ) { + try { + Method method = cls.getMethod( CAUSE_METHOD_NAMES[i], null ); + if ( method != null && Throwable.class.isAssignableFrom( method.getReturnType() ) ) { + return true; + } + } + catch ( NoSuchMethodException ignored ) { + } + catch ( SecurityException ignored ) { + } + } + + try { + Field field = cls.getField( "detail" ); + if ( field != null ) { + return true; + } + } + catch ( NoSuchFieldException ignored ) { + } + catch ( SecurityException ignored ) { + } + + return false; + } + + //----------------------------------------------------------------------- + /** + *

Counts the number of Throwable objects in the + * exception chain. + * + * A throwable without cause will return 1. + * A throwable with one cause will return 2 and so on. + * A null throwable will return 0.
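+ *
+ * For example (an illustrative chain):
+ * <pre>
+ * Throwable chain = new InvocationTargetException( new Exception() );
+ * int count = ExceptionUtils.getThrowableCount( chain ); // 2
+ * </pre>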

    + * + * @param throwable the throwable to inspect, may be null + * @return the count of throwables, zero if null input + */ + public static int getThrowableCount(Throwable throwable) { + int count = 0; + while ( throwable != null ) { + count++; + throwable = ExceptionUtils.getCause( throwable ); + } + return count; + } + + /** + *

Returns the list of Throwable objects in the + * exception chain. + * + * A throwable without cause will return an array containing + * one element - the input throwable. + * A throwable with one cause will return an array containing + * two elements - the input throwable and the cause throwable. + * A null throwable will return an array of size zero.

    + * + * @param throwable the throwable to inspect, may be null + * @return the array of throwables, never null + */ + public static Throwable[] getThrowables(Throwable throwable) { + List list = new ArrayList(); + while ( throwable != null ) { + list.add( throwable ); + throwable = ExceptionUtils.getCause( throwable ); + } + return ( Throwable[] ) list.toArray( new Throwable[list.size()] ); + } + + //----------------------------------------------------------------------- + /** + *

Returns the (zero based) index of the first Throwable + * that matches the specified type in the exception chain. + * + * A null throwable returns -1. + * A null type returns -1. + * No match in the chain returns -1.
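+ *
+ * The comparison is against the exact class; for example:
+ * <pre>
+ * Throwable chain = new InvocationTargetException( new NullPointerException() );
+ * int index = ExceptionUtils.indexOfThrowable( chain, NullPointerException.class ); // 1
+ * </pre>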

    + * + * @param throwable the throwable to inspect, may be null + * @param type the type to search for + * @return the index into the throwable chain, -1 if no match or null input + */ + public static int indexOfThrowable(Throwable throwable, Class type) { + return indexOfThrowable( throwable, type, 0 ); + } + + /** + *

Returns the (zero based) index of the first Throwable + * that matches the specified type in the exception chain from + * a specified index. + * + * A null throwable returns -1. + * A null type returns -1. + * No match in the chain returns -1. + * A negative start index is treated as zero. + * A start index greater than the number of throwables returns -1.

    + * + * @param throwable the throwable to inspect, may be null + * @param type the type to search for + * @param fromIndex the (zero based) index of the starting position, + * negative treated as zero, larger than chain size returns -1 + * @return the index into the throwable chain, -1 if no match or null input + */ + public static int indexOfThrowable(Throwable throwable, Class type, int fromIndex) { + if ( throwable == null ) { + return -1; + } + if ( fromIndex < 0 ) { + fromIndex = 0; + } + Throwable[] throwables = ExceptionUtils.getThrowables( throwable ); + if ( fromIndex >= throwables.length ) { + return -1; + } + for ( int i = fromIndex; i < throwables.length; i++ ) { + if ( throwables[i].getClass().equals( type ) ) { + return i; + } + } + return -1; + } + + //----------------------------------------------------------------------- + /** + *

Prints a compact stack trace for the root cause of a throwable + * to System.err. + * + * The compact stack trace starts with the root cause and prints + * stack frames up to the place where it was caught and wrapped. + * Then it prints the wrapped exception and continues with stack frames + * until the wrapper exception is caught and wrapped again, etc. + * + * The method is equivalent to printStackTrace for throwables + * that don't have nested causes.
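+ *
+ * Typical use (a sketch; the session operation in the try block is illustrative):
+ * <pre>
+ * try {
+ *     session.flush();
+ * }
+ * catch ( HibernateException he ) {
+ *     ExceptionUtils.printRootCauseStackTrace( he ); // compact trace to System.err
+ * }
+ * </pre>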

    + * + * @param throwable the throwable to output + * @since 2.0 + */ + public static void printRootCauseStackTrace(Throwable throwable) { + printRootCauseStackTrace( throwable, System.err ); + } + + /** + *

Prints a compact stack trace for the root cause of a throwable. + * + * The compact stack trace starts with the root cause and prints + * stack frames up to the place where it was caught and wrapped. + * Then it prints the wrapped exception and continues with stack frames + * until the wrapper exception is caught and wrapped again, etc. + * + * The method is equivalent to printStackTrace for throwables + * that don't have nested causes.

    + * + * @param throwable the throwable to output, may be null + * @param stream the stream to output to, may not be null + * @throws IllegalArgumentException if the stream is null + * @since 2.0 + */ + public static void printRootCauseStackTrace(Throwable throwable, PrintStream stream) { + if ( throwable == null ) { + return; + } + if ( stream == null ) { + throw new IllegalArgumentException( "The PrintStream must not be null" ); + } + String trace[] = getRootCauseStackTrace( throwable ); + for ( int i = 0; i < trace.length; i++ ) { + stream.println( trace[i] ); + } + stream.flush(); + } + + /** + *

Prints a compact stack trace for the root cause of a throwable. + * + * The compact stack trace starts with the root cause and prints + * stack frames up to the place where it was caught and wrapped. + * Then it prints the wrapped exception and continues with stack frames + * until the wrapper exception is caught and wrapped again, etc. + * + * The method is equivalent to printStackTrace for throwables + * that don't have nested causes.

    + * + * @param throwable the throwable to output, may be null + * @param writer the writer to output to, may not be null + * @throws IllegalArgumentException if the writer is null + * @since 2.0 + */ + public static void printRootCauseStackTrace(Throwable throwable, PrintWriter writer) { + if ( throwable == null ) { + return; + } + if ( writer == null ) { + throw new IllegalArgumentException( "The PrintWriter must not be null" ); + } + String trace[] = getRootCauseStackTrace( throwable ); + for ( int i = 0; i < trace.length; i++ ) { + writer.println( trace[i] ); + } + writer.flush(); + } + + //----------------------------------------------------------------------- + /** + *

    Creates a compact stack trace for the root cause of the supplied + * Throwable.
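+ *
+ * For example, to send the condensed trace somewhere other than a stream:
+ * <pre>
+ * String[] frames = ExceptionUtils.getRootCauseStackTrace( throwable );
+ * for ( int i = 0; i &lt; frames.length; i++ ) {
+ *     System.err.println( frames[i] );
+ * }
+ * </pre>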

    + * + * @param throwable the throwable to examine, may be null + * @return an array of stack trace frames, never null + * @since 2.0 + */ + public static String[] getRootCauseStackTrace(Throwable throwable) { + if ( throwable == null ) { + return ArrayHelper.EMPTY_STRING_ARRAY; + } + Throwable throwables[] = getThrowables( throwable ); + int count = throwables.length; + ArrayList frames = new ArrayList(); + List nextTrace = getStackFrameList( throwables[count - 1] ); + for ( int i = count; --i >= 0; ) { + List trace = nextTrace; + if ( i != 0 ) { + nextTrace = getStackFrameList( throwables[i - 1] ); + removeCommonFrames( trace, nextTrace ); + } + if ( i == count - 1 ) { + frames.add( throwables[i].toString() ); + } + else { + frames.add( WRAPPED_MARKER + throwables[i].toString() ); + } + for ( int j = 0; j < trace.size(); j++ ) { + frames.add( trace.get( j ) ); + } + } + return ( String[] ) frames.toArray( new String[0] ); + } + + /** + *

    Removes common frames from the cause trace given the two stack traces.

    + * + * @param causeFrames stack trace of a cause throwable + * @param wrapperFrames stack trace of a wrapper throwable + * @throws IllegalArgumentException if either argument is null + * @since 2.0 + */ + public static void removeCommonFrames(List causeFrames, List wrapperFrames) { + if ( causeFrames == null || wrapperFrames == null ) { + throw new IllegalArgumentException( "The List must not be null" ); + } + int causeFrameIndex = causeFrames.size() - 1; + int wrapperFrameIndex = wrapperFrames.size() - 1; + while ( causeFrameIndex >= 0 && wrapperFrameIndex >= 0 ) { + // Remove the frame from the cause trace if it is the same + // as in the wrapper trace + String causeFrame = ( String ) causeFrames.get( causeFrameIndex ); + String wrapperFrame = ( String ) wrapperFrames.get( wrapperFrameIndex ); + if ( causeFrame.equals( wrapperFrame ) ) { + causeFrames.remove( causeFrameIndex ); + } + causeFrameIndex--; + wrapperFrameIndex--; + } + } + + //----------------------------------------------------------------------- + /** + *

    Gets the stack trace from a Throwable as a String.

    + * + * @param throwable the Throwable to be examined + * @return the stack trace as generated by the exception's + * printStackTrace(PrintWriter) method + */ + public static String getStackTrace(Throwable throwable) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter( sw, true ); + throwable.printStackTrace( pw ); + return sw.getBuffer().toString(); + } + + /** + *

A way to get the entire nested stack-trace of a throwable.
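+ *
+ * For example, to record the complete chain in a single log entry (a sketch
+ * assuming a commons-logging Log is in scope):
+ * <pre>
+ * log.error( ExceptionUtils.getFullStackTrace( throwable ) );
+ * </pre>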

    + * + * @param throwable the Throwable to be examined + * @return the nested stack trace, with the root cause first + * @since 2.0 + */ + public static String getFullStackTrace(Throwable throwable) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter( sw, true ); + Throwable[] ts = getThrowables( throwable ); + for ( int i = 0; i < ts.length; i++ ) { + ts[i].printStackTrace( pw ); + if ( isNestedThrowable( ts[i] ) ) { + break; + } + } + return sw.getBuffer().toString(); + } + + //----------------------------------------------------------------------- + /** + *

    Captures the stack trace associated with the specified + * Throwable object, decomposing it into a list of + * stack frames.

+ * + * @param throwable the Throwable to examine, may be null + * @return an array of strings describing each stack frame, never null + */ + public static String[] getStackFrames(Throwable throwable) { + if ( throwable == null ) { + return ArrayHelper.EMPTY_STRING_ARRAY; + } + return getStackFrames( getStackTrace( throwable ) ); + } + + /** + *

    Functionality shared between the + * getStackFrames(Throwable) methods of this and the + * {@link org.apache.commons.lang.exception.NestableDelegate} + * classes.

    + */ + static String[] getStackFrames(String stackTrace) { + String linebreak = LINE_SEPARATOR; + StringTokenizer frames = new StringTokenizer( stackTrace, linebreak ); + List list = new LinkedList(); + while ( frames.hasMoreTokens() ) { + list.add( frames.nextToken() ); + } + return ( String[] ) list.toArray( new String[list.size()] ); + } + + /** + *

    Produces a List of stack frames - the message + * is not included.

    + *

    + *

    This works in most cases - it will only fail if the exception + * message contains a line that starts with: + * "   at".

    + * + * @param t is any throwable + * @return List of stack frames + */ + static List getStackFrameList(Throwable t) { + String stackTrace = getStackTrace( t ); + String linebreak = LINE_SEPARATOR; + StringTokenizer frames = new StringTokenizer( stackTrace, linebreak ); + List list = new LinkedList(); + boolean traceStarted = false; + while ( frames.hasMoreTokens() ) { + String token = frames.nextToken(); + // Determine if the line starts with at + int at = token.indexOf( "at" ); + if ( at != -1 && token.substring( 0, at ).trim().length() == 0 ) { + traceStarted = true; + list.add( token ); + } + else if ( traceStarted ) { + break; + } + } + return list; + } + +} diff --git a/src/org/hibernate/exception/GenericJDBCException.java b/src/org/hibernate/exception/GenericJDBCException.java new file mode 100644 index 0000000000..93b0fb15a6 --- /dev/null +++ b/src/org/hibernate/exception/GenericJDBCException.java @@ -0,0 +1,21 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; + +/** + * Generic, non-specific JDBCException. + * + * @author Steve Ebersole + */ +public class GenericJDBCException extends JDBCException { + public GenericJDBCException(String string, SQLException root) { + super( string, root ); + } + + public GenericJDBCException(String string, SQLException root, String sql) { + super( string, root, sql ); + } +} diff --git a/src/org/hibernate/exception/JDBCConnectionException.java b/src/org/hibernate/exception/JDBCConnectionException.java new file mode 100644 index 0000000000..c09b5a6571 --- /dev/null +++ b/src/org/hibernate/exception/JDBCConnectionException.java @@ -0,0 +1,22 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; + +/** + * Implementation of JDBCException indicating problems with communicating with the + * database (can also include incorrect JDBC setup). + * + * @author Steve Ebersole + */ +public class JDBCConnectionException extends JDBCException { + public JDBCConnectionException(String string, SQLException root) { + super( string, root ); + } + + public JDBCConnectionException(String string, SQLException root, String sql) { + super( string, root, sql ); + } +} diff --git a/src/org/hibernate/exception/JDBCExceptionHelper.java b/src/org/hibernate/exception/JDBCExceptionHelper.java new file mode 100644 index 0000000000..1d1237bc84 --- /dev/null +++ b/src/org/hibernate/exception/JDBCExceptionHelper.java @@ -0,0 +1,94 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; +import org.hibernate.util.JDBCExceptionReporter; + +import java.sql.SQLException; + +/** + * Implementation of JDBCExceptionHelper. + * + * @author Steve Ebersole + */ +public final class JDBCExceptionHelper { + + private JDBCExceptionHelper() { + } + + /** + * Converts the given SQLException into Hibernate's JDBCException hierarchy, as well as performing + * appropriate logging. + * + * @param converter The converter to use. + * @param sqlException The exception to convert. + * @param message An optional error message. + * @return The converted JDBCException. + */ + public static JDBCException convert(SQLExceptionConverter converter, SQLException sqlException, String message) { + return convert( converter, sqlException, message, "???" ); + } + + /** + * Converts the given SQLException into Hibernate's JDBCException hierarchy, as well as performing + * appropriate logging. + * + * @param converter The converter to use. 
+ * @param sqlException The exception to convert. + * @param message An optional error message. + * @param sql Optionally, the sql being performed when the exception occurred. + * @return The converted JDBCException. + */ + public static JDBCException convert(SQLExceptionConverter converter, SQLException sqlException, String message, String sql) { + JDBCExceptionReporter.logExceptions( sqlException, message + " [" + sql + "]" ); + return converter.convert( sqlException, message, sql ); + } + + /** + * For the given SQLException, locates the vendor-specific error code. + * + * @param sqlException The exception from which to extract the error code + * @return The error code. + */ + public static int extractErrorCode(SQLException sqlException) { + int errorCode = sqlException.getErrorCode(); + SQLException nested = sqlException.getNextException(); + while ( errorCode == 0 && nested != null ) { + errorCode = nested.getErrorCode(); + nested = nested.getNextException(); + } + return errorCode; + } + + /** + * For the given SQLException, locates the X/Open-compliant SQLState. + * + * @param sqlException The exception from which to extract the SQLState + * @return The SQLState code, or null. + */ + public static String extractSqlState(SQLException sqlException) { + String sqlState = sqlException.getSQLState(); + SQLException nested = sqlException.getNextException(); + while ( sqlState == null && nested != null ) { + sqlState = nested.getSQLState(); + nested = nested.getNextException(); + } + return sqlState; + } + + /** + * For the given SQLException, locates the X/Open-compliant SQLState's class code. + * + * @param sqlException The exception from which to extract the SQLState class code + * @return The SQLState class code, or null. + */ + public static String extractSqlStateClassCode(SQLException sqlException) { + return determineSqlStateClassCode( extractSqlState( sqlException ) ); + } + + public static String determineSqlStateClassCode(String sqlState) { + if ( sqlState == null || sqlState.length() < 2 ) { + return sqlState; + } + return sqlState.substring( 0, 2 ); + } +} diff --git a/src/org/hibernate/exception/LockAcquisitionException.java b/src/org/hibernate/exception/LockAcquisitionException.java new file mode 100644 index 0000000000..2db54126de --- /dev/null +++ b/src/org/hibernate/exception/LockAcquisitionException.java @@ -0,0 +1,22 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; + +/** + * Implementation of JDBCException indicating a problem acquiring a lock + * on the database. + * + * @author Steve Ebersole + */ +public class LockAcquisitionException extends JDBCException { + public LockAcquisitionException(String string, SQLException root) { + super( string, root ); + } + + public LockAcquisitionException(String string, SQLException root, String sql) { + super( string, root, sql ); + } +} diff --git a/src/org/hibernate/exception/Nestable.java b/src/org/hibernate/exception/Nestable.java new file mode 100644 index 0000000000..ffcc311bb6 --- /dev/null +++ b/src/org/hibernate/exception/Nestable.java @@ -0,0 +1,203 @@ +/* ==================================================================== + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2002-2003 The Apache Software Foundation. All rights + * reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1.
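A hypothetical sketch of how the extract* helpers above walk a chain of SQLExceptions (the SQLState and error-code values are invented):

    import java.sql.SQLException;
    import org.hibernate.exception.JDBCExceptionHelper;

    public class SqlStateWalkSketch {
        public static void main(String[] args) {
            SQLException head = new SQLException( "batch failed" ); // no SQLState, error code 0
            head.setNextException( new SQLException( "unique violation", "23505", 2601 ) );
            // Both helpers fall through to the nested exception when the head carries no data.
            System.out.println( JDBCExceptionHelper.extractSqlState( head ) );          // expected: 23505
            System.out.println( JDBCExceptionHelper.extractErrorCode( head ) );         // expected: 2601
            System.out.println( JDBCExceptionHelper.extractSqlStateClassCode( head ) ); // expected: 23
        }
    }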
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. The end-user documentation included with the redistribution, if + * any, must include the following acknowledgement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowledgement may appear in the software itself, + * if and wherever such third-party acknowledgements normally appear. + * + * 4. The names "The Jakarta Project", "Commons", and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Software Foundation. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . + */ +package org.hibernate.exception; + +import java.io.PrintStream; +import java.io.PrintWriter; + +/** + * An interface to be implemented by {@link java.lang.Throwable} + * extensions which would like to be able to nest root exceptions + * inside themselves. + * + * @author Daniel Rall + * @author Kasper Nielsen + * @author Steven Caswell + * @author Pete Gieser + * @version $Id$ + * @since 1.0 + */ +public interface Nestable { + + /** + * Returns the reference to the exception or error that caused the + * exception implementing the Nestable to be thrown. + * + * @return throwable that caused the original exception + */ + public Throwable getCause(); + + /** + * Returns the error message of this and any nested + * Throwable. + * + * @return the error message + */ + public String getMessage(); + + /** + * Returns the error message of the Throwable in the chain + * of Throwables at the specified index, numbered from 0.
+ * + * @param index the index of the Throwable in the chain of + * Throwables + * @return the error message, or null if the Throwable at the + * specified index in the chain does not contain a message + * @throws IndexOutOfBoundsException if the index argument is + * negative or not less than the count of Throwables in the + * chain + */ + public String getMessage(int index); + + /** + * Returns the error message of this and any nested Throwables + * in an array of Strings, one element for each message. Any + * Throwable not containing a message is represented in the + * array by a null. This has the effect of causing the length of the returned + * array to be equal to the result of the {@link #getThrowableCount()} + * operation. + * + * @return the error messages + */ + public String[] getMessages(); + + /** + * Returns the Throwable in the chain of + * Throwables at the specified index, numbered from 0. + * + * @param index the index, numbered from 0, of the Throwable in + * the chain of Throwables + * @return the Throwable + * @throws IndexOutOfBoundsException if the index argument is + * negative or not less than the count of Throwables in the + * chain + */ + public Throwable getThrowable(int index); + + /** + * Returns the number of nested Throwables represented by + * this Nestable, including this Nestable. + * + * @return the throwable count + */ + public int getThrowableCount(); + + /** + * Returns this Nestable and any nested Throwables + * in an array of Throwables, one element for each + * Throwable. + * + * @return the Throwables + */ + public Throwable[] getThrowables(); + + /** + * Returns the index, numbered from 0, of the first occurrence of the + * specified type in the chain of Throwables, or -1 if the + * specified type is not found in the chain. + * + * @param type Class to be found + * @return index of the first occurrence of the type in the chain, or -1 if + * the type is not found + */ + public int indexOfThrowable(Class type); + + /** + * Returns the index, numbered from 0, of the first Throwable + * that matches the specified type in the chain of Throwables + * with an index greater than or equal to the specified index, or -1 if + * the type is not found. + * + * @param type Class to be found + * @param fromIndex the index, numbered from 0, of the starting position in + * the chain to be searched + * @return index of the first occurrence of the type in the chain, or -1 if + * the type is not found + * @throws IndexOutOfBoundsException if the fromIndex argument + * is negative or not less than the count of Throwables in the + * chain + */ + public int indexOfThrowable(Class type, int fromIndex); + + /** + * Prints the stack trace of this exception to the specified print + * writer. Includes information from the exception, if any, + * which caused this exception. + * + * @param out PrintWriter to use for output. + */ + public void printStackTrace(PrintWriter out); + + /** + * Prints the stack trace of this exception to the specified print + * stream. Includes information from the exception, if any, + * which caused this exception. + * + * @param out PrintStream to use for output. + */ + public void printStackTrace(PrintStream out); + + /** + * Prints the stack trace for this exception only--root cause not + * included--using the provided writer. Used by {@link + * org.apache.commons.lang.exception.NestableDelegate} to write + * individual stack traces to a buffer. The implementation of + * this method should call + * super.printStackTrace(out); in most cases.
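To make the Nestable contract concrete, a hypothetical sketch using NestableRuntimeException (added later in this patch); the expected results are shown as comments:

    import java.util.Arrays;
    import org.hibernate.exception.NestableRuntimeException;

    public class NestableChainSketch {
        public static void main(String[] args) {
            NestableRuntimeException e = new NestableRuntimeException( "outer",
                    new NestableRuntimeException( "inner", new IllegalStateException( "root" ) ) );
            System.out.println( e.getThrowableCount() );                             // 3
            System.out.println( Arrays.asList( e.getMessages() ) );                  // [outer, inner, root]
            System.out.println( e.indexOfThrowable( IllegalStateException.class ) ); // 2
        }
    }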
+ * + * @param out The writer to use. + */ + public void printPartialStackTrace(PrintWriter out); + +} diff --git a/src/org/hibernate/exception/NestableDelegate.java b/src/org/hibernate/exception/NestableDelegate.java new file mode 100644 index 0000000000..b482b2ac5c --- /dev/null +++ b/src/org/hibernate/exception/NestableDelegate.java @@ -0,0 +1,412 @@ +/* ==================================================================== + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2002-2003 The Apache Software Foundation. All rights + * reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. The end-user documentation included with the redistribution, if + * any, must include the following acknowledgement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowledgement may appear in the software itself, + * if and wherever such third-party acknowledgements normally appear. + * + * 4. The names "The Jakarta Project", "Commons", and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Software Foundation. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . + */ +package org.hibernate.exception; + +import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.Serializable; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +/** + *

    A shared implementation of the nestable exception functionality.

    + *

    + * The code is shared between + * {@link org.apache.commons.lang.exception.NestableError NestableError}, + * {@link org.apache.commons.lang.exception.NestableException NestableException} and + * {@link org.apache.commons.lang.exception.NestableRuntimeException NestableRuntimeException}. + *

+ * + * @author Rafal Krzewski + * @author Daniel Rall + * @author Kasper Nielsen + * @author Steven Caswell + * @author Sean C. Sullivan + * @author Stephen Colebourne + * @version $Id$ + * @since 1.0 + */ +public class NestableDelegate implements Serializable { + + /** + * Constructor error message. + */ + private static final String MUST_BE_THROWABLE = + "The Nestable implementation passed to the NestableDelegate(Nestable) " + + "constructor must extend java.lang.Throwable"; + + /** + * Holds the reference to the exception or error that we're + * wrapping (which must be a {@link + * org.apache.commons.lang.exception.Nestable} implementation). + */ + private Throwable nestable = null; + + /** + * Whether to print the stack trace top-down. + * This flag may be set by calling code, typically in initialisation. + * + * @since 2.0 + */ + private static boolean topDown = true; + + /** + * Whether to trim the repeated stack trace. + * This flag may be set by calling code, typically in initialisation. + * + * @since 2.0 + */ + private static boolean trimStackFrames = true; + + /** + * Constructs a new NestableDelegate instance to manage the + * specified Nestable. + * + * @param nestable the Nestable implementation (must extend + * {@link java.lang.Throwable}) + * @since 2.0 + */ + public NestableDelegate(Nestable nestable) { + if ( nestable instanceof Throwable ) { + this.nestable = ( Throwable ) nestable; + } + else { + throw new IllegalArgumentException( MUST_BE_THROWABLE ); + } + } + + /** + * Returns the error message of the Throwable in the chain + * of Throwables at the specified index, numbered from 0. + * + * @param index the index of the Throwable in the chain of + * Throwables + * @return the error message, or null if the Throwable at the + * specified index in the chain does not contain a message + * @throws IndexOutOfBoundsException if the index argument is + * negative or not less than the count of Throwables in the + * chain + * @since 2.0 + */ + public String getMessage(int index) { + Throwable t = this.getThrowable( index ); + if ( Nestable.class.isInstance( t ) ) { + return ( ( Nestable ) t ).getMessage( 0 ); + } + else { + return t.getMessage(); + } + } + + /** + * Returns the full message contained by the Nestable + * and any nested Throwables. + * + * @param baseMsg the base message to use when creating the full + * message. Should generally be called via + * nestableHelper.getMessage( super.getMessage() ), + * where super is an instance of {@link + * java.lang.Throwable}. + * @return The concatenated message for this and all nested + * Throwables + * @since 2.0 + */ + public String getMessage(String baseMsg) { + StringBuffer msg = new StringBuffer(); + if ( baseMsg != null ) { + msg.append( baseMsg ); + } + + Throwable nestedCause = ExceptionUtils.getCause( this.nestable ); + if ( nestedCause != null ) { + String causeMsg = nestedCause.getMessage(); + if ( causeMsg != null ) { + if ( baseMsg != null ) { + msg.append( ": " ); + } + msg.append( causeMsg ); + } + + } + return ( msg.length() > 0 ? msg.toString() : null ); + } + + /** + * Returns the error message of this and any nested Throwables + * in an array of Strings, one element for each message. Any + * Throwable not containing a message is represented in the + * array by a null. This has the effect of causing the length of the returned + * array to be equal to the result of the {@link #getThrowableCount()} + * operation.
+ * + * @return the error messages + * @since 2.0 + */ + public String[] getMessages() { + Throwable[] throwables = this.getThrowables(); + String[] msgs = new String[throwables.length]; + for ( int i = 0; i < throwables.length; i++ ) { + msgs[i] = Nestable.class.isInstance( throwables[i] ) ? + ( ( Nestable ) throwables[i] ).getMessage( 0 ) : + throwables[i].getMessage(); + } + return msgs; + } + + /** + * Returns the Throwable in the chain of + * Throwables at the specified index, numbered from 0. + * + * @param index the index, numbered from 0, of the Throwable in + * the chain of Throwables + * @return the Throwable + * @throws IndexOutOfBoundsException if the index argument is + * negative or not less than the count of Throwables in the + * chain + * @since 2.0 + */ + public Throwable getThrowable(int index) { + if ( index == 0 ) { + return this.nestable; + } + Throwable[] throwables = this.getThrowables(); + return throwables[index]; + } + + /** + * Returns the number of Throwables contained in the + * Nestable contained by this delegate. + * + * @return the throwable count + * @since 2.0 + */ + public int getThrowableCount() { + return ExceptionUtils.getThrowableCount( this.nestable ); + } + + /** + * Returns this delegate's Nestable and any nested + * Throwables in an array of Throwables, one + * element for each Throwable. + * + * @return the Throwables + * @since 2.0 + */ + public Throwable[] getThrowables() { + return ExceptionUtils.getThrowables( this.nestable ); + } + + /** + * Returns the index, numbered from 0, of the first Throwable + * that matches the specified type in the chain of Throwables + * held in this delegate's Nestable with an index greater than + * or equal to the specified index, or -1 if the type is not found. + * + * @param type Class to be found + * @param fromIndex the index, numbered from 0, of the starting position in + * the chain to be searched + * @return index of the first occurrence of the type in the chain, or -1 if + * the type is not found + * @throws IndexOutOfBoundsException if the fromIndex argument + * is negative or not less than the count of Throwables in the + * chain + * @since 2.0 + */ + public int indexOfThrowable(Class type, int fromIndex) { + if ( fromIndex < 0 ) { + throw new IndexOutOfBoundsException( "The start index was out of bounds: " + fromIndex ); + } + Throwable[] throwables = ExceptionUtils.getThrowables( this.nestable ); + if ( fromIndex >= throwables.length ) { + throw new IndexOutOfBoundsException( "The start index was out of bounds: " + + fromIndex + " >= " + throwables.length ); + } + for ( int i = fromIndex; i < throwables.length; i++ ) { + if ( throwables[i].getClass().equals( type ) ) { + return i; + } + } + return -1; + } + + /** + * Prints the stack trace of this exception to the standard error + * stream. + */ + public void printStackTrace() { + printStackTrace( System.err ); + } + + /** + * Prints the stack trace of this exception to the specified + * stream. + * + * @param out PrintStream to use for output. + * @see #printStackTrace(PrintWriter) + */ + public void printStackTrace(PrintStream out) { + synchronized ( out ) { + PrintWriter pw = new PrintWriter( out, false ); + printStackTrace( pw ); + // Flush the PrintWriter before it's GC'ed. + pw.flush(); + } + } + + /** + * Prints the stack trace of this exception to the specified + * writer. If the Throwable class has a getCause + * method (i.e. running on jre1.4 or higher), this method just + * uses Throwable's printStackTrace() method.
Otherwise, generates + * the stack-trace, by taking into account the 'topDown' and + * 'trimStackFrames' parameters. The topDown and trimStackFrames + * are set to 'true' by default (produces jre1.4-like stack trace). + * + * @param out PrintWriter to use for output. + */ + public void printStackTrace(PrintWriter out) { + Throwable throwable = this.nestable; + // if running on jre1.4 or higher, use default printStackTrace + if ( ExceptionUtils.isThrowableNested() ) { + if ( throwable instanceof Nestable ) { + ( ( Nestable ) throwable ).printPartialStackTrace( out ); + } + else { + throwable.printStackTrace( out ); + } + return; + } + + // generating the nested stack trace + List stacks = new ArrayList(); + while ( throwable != null ) { + String[] st = getStackFrames( throwable ); + stacks.add( st ); + throwable = ExceptionUtils.getCause( throwable ); + } + + // If NOT topDown, reverse the stack + String separatorLine = "Caused by: "; + if ( !topDown ) { + separatorLine = "Rethrown as: "; + Collections.reverse( stacks ); + } + + // Remove the repeated lines in the stack + if ( trimStackFrames ) trimStackFrames( stacks ); + + synchronized ( out ) { + for ( Iterator iter = stacks.iterator(); iter.hasNext(); ) { + String[] st = ( String[] ) iter.next(); + for ( int i = 0, len = st.length; i < len; i++ ) { + out.println( st[i] ); + } + if ( iter.hasNext() ) out.print( separatorLine ); + } + } + } + + /** + * Captures the stack trace associated with the specified + * Throwable object, decomposing it into a list of + * stack frames. + * + * @param t The Throwable. + * @return An array of strings describing each stack frame. + * @since 2.0 + */ + protected String[] getStackFrames(Throwable t) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter( sw, true ); + + // Avoid infinite loop between decompose() and printStackTrace(). + if ( t instanceof Nestable ) { + ( ( Nestable ) t ).printPartialStackTrace( pw ); + } + else { + t.printStackTrace( pw ); + } + return ExceptionUtils.getStackFrames( sw.getBuffer().toString() ); + } + + /** + * Trims the stack frames. The first set is left untouched. The rest + * of the frames are truncated from the bottom by comparing with + * one just on top. + * + * @param stacks The list containing String[] elements + * @since 2.0 + */ + protected void trimStackFrames(List stacks) { + for ( int size = stacks.size(), i = size - 1; i > 0; i-- ) { + String[] curr = ( String[] ) stacks.get( i ); + String[] next = ( String[] ) stacks.get( i - 1 ); + + List currList = new ArrayList( Arrays.asList( curr ) ); + List nextList = new ArrayList( Arrays.asList( next ) ); + ExceptionUtils.removeCommonFrames( currList, nextList ); + + int trimmed = curr.length - currList.size(); + if ( trimmed > 0 ) { + currList.add( "\t... " + trimmed + " more" ); + stacks.set( i, + currList.toArray( new String[currList.size()] ) ); + } + } + } +} diff --git a/src/org/hibernate/exception/NestableException.java b/src/org/hibernate/exception/NestableException.java new file mode 100644 index 0000000000..12405f399e --- /dev/null +++ b/src/org/hibernate/exception/NestableException.java @@ -0,0 +1,254 @@ +/* ==================================================================== + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2002-2003 The Apache Software Foundation. All rights + * reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. The end-user documentation included with the redistribution, if + * any, must include the following acknowledgement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowledgement may appear in the software itself, + * if and wherever such third-party acknowledgements normally appear. + * + * 4. The names "The Jakarta Project", "Commons", and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Software Foundation. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . + */ +package org.hibernate.exception; + +import java.io.PrintStream; +import java.io.PrintWriter; + +/** + * The base class of all exceptions which can contain other exceptions. + *

It is intended to ease debugging by carrying information + * about the exception which was caught and provoked throwing the + * current exception. Catching and rethrowing may occur multiple + * times, and provided that all exceptions except the first one + * are descendants of NestableException, when the + * exception is finally printed out using any of the + * printStackTrace() methods, the stacktrace will contain + * the information about all exceptions thrown and caught on + * the way. + *

    Running the following program + *

    + *  1 import org.apache.commons.lang.exception.NestableException;
    + *  2
    + *  3 public class Test {
    + *  4     public static void main( String[] args ) {
    + *  5         try {
    + *  6             a();
    + *  7         } catch(Exception e) {
    + *  8             e.printStackTrace();
    + *  9         }
    + * 10      }
    + * 11
    + * 12      public static void a() throws Exception {
    + * 13          try {
    + * 14              b();
    + * 15          } catch(Exception e) {
    + * 16              throw new NestableException("foo", e);
    + * 17          }
    + * 18      }
    + * 19
    + * 20      public static void b() throws Exception {
    + * 21          try {
    + * 22              c();
    + * 23          } catch(Exception e) {
    + * 24              throw new NestableException("bar", e);
    + * 25          }
    + * 26      }
    + * 27
    + * 28      public static void c() throws Exception {
    + * 29          throw new Exception("baz");
    + * 30      }
    + * 31 }
    + * 
    + *

    Yields the following stacktrace: + *

    + * org.apache.commons.lang.exception.NestableException: foo
    + *         at Test.a(Test.java:16)
    + *         at Test.main(Test.java:6)
    + * Caused by: org.apache.commons.lang.exception.NestableException: bar
    + *         at Test.b(Test.java:24)
    + *         at Test.a(Test.java:14)
    + *         ... 1 more
    + * Caused by: java.lang.Exception: baz
    + *         at Test.c(Test.java:29)
    + *         at Test.b(Test.java:22)
    + *         ... 2 more
    + * 

    + * + * @author Rafal Krzewski + * @author Daniel Rall + * @author Kasper Nielsen + * @author Steven Caswell + * @version $Id$ + * @since 1.0 + */ +public class NestableException extends Exception implements Nestable { + + /** + * The helper instance which contains much of the code which we + * delegate to. + */ + protected NestableDelegate delegate = new NestableDelegate( this ); + + /** + * Holds the reference to the exception or error that caused + * this exception to be thrown. + */ + private Throwable cause = null; + + /** + * Constructs a new NestableException without specified + * detail message. + */ + public NestableException() { + super(); + } + + /** + * Constructs a new NestableException with specified + * detail message. + * + * @param msg The error message. + */ + public NestableException(String msg) { + super( msg ); + } + + /** + * Constructs a new NestableException with specified + * nested Throwable. + * + * @param cause the exception or error that caused this exception to be + * thrown + */ + public NestableException(Throwable cause) { + super(); + this.cause = cause; + } + + /** + * Constructs a new NestableException with specified + * detail message and nested Throwable. + * + * @param msg the error message + * @param cause the exception or error that caused this exception to be + * thrown + */ + public NestableException(String msg, Throwable cause) { + super( msg ); + this.cause = cause; + } + + public Throwable getCause() { + return cause; + } + + /** + * Returns the detail message string of this throwable. If it was + * created with a null message, returns the following: + * ( cause==null ? null : cause.toString() ). + */ + public String getMessage() { + if ( super.getMessage() != null ) { + return super.getMessage(); + } + else if ( cause != null ) { + return cause.toString(); + } + else { + return null; + } + } + + public String getMessage(int index) { + if ( index == 0 ) { + return super.getMessage(); + } + else { + return delegate.getMessage( index ); + } + } + + public String[] getMessages() { + return delegate.getMessages(); + } + + public Throwable getThrowable(int index) { + return delegate.getThrowable( index ); + } + + public int getThrowableCount() { + return delegate.getThrowableCount(); + } + + public Throwable[] getThrowables() { + return delegate.getThrowables(); + } + + public int indexOfThrowable(Class type) { + return delegate.indexOfThrowable( type, 0 ); + } + + public int indexOfThrowable(Class type, int fromIndex) { + return delegate.indexOfThrowable( type, fromIndex ); + } + + public void printStackTrace() { + delegate.printStackTrace(); + } + + public void printStackTrace(PrintStream out) { + delegate.printStackTrace( out ); + } + + public void printStackTrace(PrintWriter out) { + delegate.printStackTrace( out ); + } + + public final void printPartialStackTrace(PrintWriter out) { + super.printStackTrace( out ); + } + +} diff --git a/src/org/hibernate/exception/NestableRuntimeException.java b/src/org/hibernate/exception/NestableRuntimeException.java new file mode 100644 index 0000000000..7c6ee0ea28 --- /dev/null +++ b/src/org/hibernate/exception/NestableRuntimeException.java @@ -0,0 +1,214 @@ +/* ==================================================================== + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2002-2003 The Apache Software Foundation. All rights + * reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. The end-user documentation included with the redistribution, if + * any, must include the following acknowledgement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowledgement may appear in the software itself, + * if and wherever such third-party acknowledgements normally appear. + * + * 4. The names "The Jakarta Project", "Commons", and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Software Foundation. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . + */ +package org.hibernate.exception; + +import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.ObjectOutputStream; +import java.io.IOException; + +import antlr.RecognitionException; + +/** + * The base class of all runtime exceptions which can contain other + * exceptions. + * + * @author Rafal Krzewski + * @author Daniel Rall + * @author Kasper Nielsen + * @author Steven Caswell + * @version $Id$ + * @see org.apache.commons.lang.exception.NestableException + * @since 1.0 + */ +public class NestableRuntimeException extends RuntimeException implements Nestable { + + /** + * The helper instance which contains much of the code which we + * delegate to. + */ + protected NestableDelegate delegate = new NestableDelegate( this ); + + /** + * Holds the reference to the exception or error that caused + * this exception to be thrown. + */ + private Throwable cause = null; + + /** + * Constructs a new NestableRuntimeException without specified + * detail message. 
+ */ + public NestableRuntimeException() { + super(); + } + + /** + * Constructs a new NestableRuntimeException with specified + * detail message. + * + * @param msg the error message + */ + public NestableRuntimeException(String msg) { + super( msg ); + } + + /** + * Constructs a new NestableRuntimeException with specified + * nested Throwable. + * + * @param cause the exception or error that caused this exception to be + * thrown + */ + public NestableRuntimeException(Throwable cause) { + super(); + this.cause = cause; + } + + /** + * Constructs a new NestableRuntimeException with specified + * detail message and nested Throwable. + * + * @param msg the error message + * @param cause the exception or error that caused this exception to be + * thrown + */ + public NestableRuntimeException(String msg, Throwable cause) { + super( msg ); + this.cause = cause; + } + + public Throwable getCause() { + return cause; + } + + /** + * Returns the detail message string of this throwable. If it was + * created with a null message, returns the following: + * ( cause==null ? null : cause.toString() ). + */ + public String getMessage() { + if ( super.getMessage() != null ) { + return super.getMessage(); + } + else if ( cause != null ) { + return cause.toString(); + } + else { + return null; + } + } + + public String getMessage(int index) { + if ( index == 0 ) { + return super.getMessage(); + } + else { + return delegate.getMessage( index ); + } + } + + public String[] getMessages() { + return delegate.getMessages(); + } + + public Throwable getThrowable(int index) { + return delegate.getThrowable( index ); + } + + public int getThrowableCount() { + return delegate.getThrowableCount(); + } + + public Throwable[] getThrowables() { + return delegate.getThrowables(); + } + + public int indexOfThrowable(Class type) { + return delegate.indexOfThrowable( type, 0 ); + } + + public int indexOfThrowable(Class type, int fromIndex) { + return delegate.indexOfThrowable( type, fromIndex ); + } + + public void printStackTrace() { + delegate.printStackTrace(); + } + + public void printStackTrace(PrintStream out) { + delegate.printStackTrace( out ); + } + + public void printStackTrace(PrintWriter out) { + delegate.printStackTrace( out ); + } + + public final void printPartialStackTrace(PrintWriter out) { + super.printStackTrace( out ); + } + + private void writeObject(ObjectOutputStream oos) throws IOException { + Throwable tempCause = cause; + // don't propagate RecognitionException, which might not be serializable + if ( cause instanceof RecognitionException ) { + cause = null; + } + oos.defaultWriteObject(); + cause = tempCause; + } + +} diff --git a/src/org/hibernate/exception/SQLExceptionConverter.java b/src/org/hibernate/exception/SQLExceptionConverter.java new file mode 100644 index 0000000000..36115dba92 --- /dev/null +++ b/src/org/hibernate/exception/SQLExceptionConverter.java @@ -0,0 +1,32 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; + +/** + * Defines a contract for implementations that know how to convert a SQLException + * into Hibernate's JDBCException hierarchy. Inspired by Spring's + * SQLExceptionTranslator. + *
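The writeObject guard shown above in NestableRuntimeException temporarily nulls an ANTLR RecognitionException cause so that serialization cannot fail on it; a hypothetical round trip:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.ObjectOutputStream;
    import org.hibernate.exception.NestableRuntimeException;

    public class SerializationGuardSketch {
        public static void main(String[] args) throws IOException {
            NestableRuntimeException e = new NestableRuntimeException(
                    "parse failed", new antlr.RecognitionException( "bad token" ) );
            // Serializes cleanly; the (possibly non-serializable) cause is simply not written.
            new ObjectOutputStream( new ByteArrayOutputStream() ).writeObject( e );
        }
    }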

    + * Implementations must have a constructor which takes a + * {@link ViolatedConstraintNameExtracter} parameter. + *

+ * + * Implementations may implement {@link Configurable} if they need to perform + * configuration steps prior to first use. + * + * @author Steve Ebersole + * @see SQLExceptionConverterFactory + */ +public interface SQLExceptionConverter { + /** + * Convert the given SQLException into Hibernate's JDBCException hierarchy. + * + * @param sqlException The SQLException to be converted. + * @param message An optional error message. + * @param sql Optionally, the sql being performed when the exception occurred. + * @return The resulting JDBCException. + * @see ConstraintViolationException + * @see JDBCConnectionException + * @see SQLGrammarException + * @see LockAcquisitionException + */ + public JDBCException convert(SQLException sqlException, String message, String sql); +} diff --git a/src/org/hibernate/exception/SQLExceptionConverterFactory.java b/src/org/hibernate/exception/SQLExceptionConverterFactory.java new file mode 100644 index 0000000000..8e708bed16 --- /dev/null +++ b/src/org/hibernate/exception/SQLExceptionConverterFactory.java @@ -0,0 +1,114 @@ +// $Id$ +package org.hibernate.exception; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.JDBCException; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.Dialect; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +import java.lang.reflect.Constructor; +import java.sql.SQLException; +import java.util.Properties; + +/** + * A factory for building SQLExceptionConverter instances. + * + * @author Steve Ebersole + */ +public class SQLExceptionConverterFactory { + + private static final Log log = LogFactory.getLog( SQLExceptionConverterFactory.class ); + + private SQLExceptionConverterFactory() { + // Private constructor - stops checkstyle from complaining. + } + + /** + * Build a SQLExceptionConverter instance. + *

+ * First, looks for a {@link Environment#SQL_EXCEPTION_CONVERTER} property to see + * if the configuration specified the class of a specific converter to use. If this + * property is set, attempts to construct an instance of that class. If not set, or + * if construction fails, the converter specific to the dialect will be used. + * + * @param dialect The defined dialect. + * @param properties The configuration properties. + * @return An appropriate SQLExceptionConverter instance. + * @throws HibernateException There was an error building the SQLExceptionConverter. + */ + public static SQLExceptionConverter buildSQLExceptionConverter(Dialect dialect, Properties properties) throws HibernateException { + SQLExceptionConverter converter = null; + + String converterClassName = ( String ) properties.get( Environment.SQL_EXCEPTION_CONVERTER ); + if ( StringHelper.isNotEmpty( converterClassName ) ) { + converter = constructConverter( converterClassName, dialect.getViolatedConstraintNameExtracter() ); + } + + if ( converter == null ) { + log.trace( "Using dialect defined converter" ); + converter = dialect.buildSQLExceptionConverter(); + } + + if ( converter instanceof Configurable ) { + try { + ( ( Configurable ) converter ).configure( properties ); + } + catch ( HibernateException e ) { + log.warn( "Unable to configure SQLExceptionConverter", e ); + throw e; + } + } + + return converter; + } + + /** + * Builds a minimal converter. The instance returned here always converts to + * {@link GenericJDBCException}. + * + * @return The minimal converter. + */ + public static SQLExceptionConverter buildMinimalSQLExceptionConverter() { + return new SQLExceptionConverter() { + public JDBCException convert(SQLException sqlException, String message, String sql) { + return new GenericJDBCException( message, sqlException, sql ); + } + }; + } + + private static SQLExceptionConverter constructConverter(String converterClassName, ViolatedConstraintNameExtracter violatedConstraintNameExtracter) { + try { + log.trace( "Attempting to construct instance of specified SQLExceptionConverter [" + converterClassName + "]" ); + Class converterClass = ReflectHelper.classForName( converterClassName ); + + // First, try to find a matching constructor accepting a ViolatedConstraintNameExtracter param...
+ Constructor[] ctors = converterClass.getDeclaredConstructors(); + for ( int i = 0; i < ctors.length; i++ ) { + if ( ctors[i].getParameterTypes() != null && ctors[i].getParameterTypes().length == 1 ) { + if ( ViolatedConstraintNameExtracter.class.isAssignableFrom( ctors[i].getParameterTypes()[0] ) ) { + try { + return ( SQLExceptionConverter ) + ctors[i].newInstance( new Object[]{violatedConstraintNameExtracter} ); + } + catch ( Throwable t ) { + // eat it and try next + } + } + } + } + + // Otherwise, try to use the no-arg constructor + return ( SQLExceptionConverter ) converterClass.newInstance(); + + } + catch ( Throwable t ) { + log.warn( "Unable to construct instance of specified SQLExceptionConverter", t ); + } + + return null; + } +} diff --git a/src/org/hibernate/exception/SQLGrammarException.java b/src/org/hibernate/exception/SQLGrammarException.java new file mode 100644 index 0000000000..885015669a --- /dev/null +++ b/src/org/hibernate/exception/SQLGrammarException.java @@ -0,0 +1,33 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; + +/** + * Implementation of JDBCException indicating that the SQL sent to the database + * server was invalid (syntax error, invalid object references, etc.). + * + * @author Steve Ebersole + */ +public class SQLGrammarException extends JDBCException { + /** + * Constructor for SQLGrammarException. + * + * @param message Optional message. + * @param root The underlying exception. + */ + public SQLGrammarException(String message, SQLException root) { + super( message, root ); + } + + /** + * Constructor for SQLGrammarException. + * + * @param message Optional message. + * @param root The underlying exception. + */ + public SQLGrammarException(String message, SQLException root, String sql) { + super( message, root, sql ); + } +} diff --git a/src/org/hibernate/exception/SQLStateConverter.java b/src/org/hibernate/exception/SQLStateConverter.java new file mode 100644 index 0000000000..962e08c93b --- /dev/null +++ b/src/org/hibernate/exception/SQLStateConverter.java @@ -0,0 +1,105 @@ +// $Id$ +package org.hibernate.exception; + +import org.hibernate.JDBCException; + +import java.sql.SQLException; +import java.util.HashSet; +import java.util.Set; + +/** + * A SQLExceptionConverter implementation which performs conversion based on + * the underlying SQLState. Interpretation of a SQL error based on SQLState + * is not nearly as accurate as using the ErrorCode (which is, however, vendor- + * specific). Use of an ErrorCode-based converter should be the preferred approach + * for converting/interpreting SQLExceptions.
+ * + * @author Steve Ebersole + */ +public class SQLStateConverter implements SQLExceptionConverter { + + private ViolatedConstraintNameExtracter extracter; + + private static final Set SQL_GRAMMAR_CATEGORIES = new HashSet(); + private static final Set DATA_CATEGORIES = new HashSet(); + private static final Set INTEGRITY_VIOLATION_CATEGORIES = new HashSet(); + private static final Set CONNECTION_CATEGORIES = new HashSet(); + + static { + SQL_GRAMMAR_CATEGORIES.add( "07" ); + SQL_GRAMMAR_CATEGORIES.add( "37" ); + SQL_GRAMMAR_CATEGORIES.add( "42" ); + SQL_GRAMMAR_CATEGORIES.add( "65" ); + SQL_GRAMMAR_CATEGORIES.add( "S0" ); + SQL_GRAMMAR_CATEGORIES.add( "20" ); + + DATA_CATEGORIES.add("22"); + DATA_CATEGORIES.add("21"); + DATA_CATEGORIES.add("02"); + + INTEGRITY_VIOLATION_CATEGORIES.add( "23" ); + INTEGRITY_VIOLATION_CATEGORIES.add( "27" ); + INTEGRITY_VIOLATION_CATEGORIES.add( "44" ); + + CONNECTION_CATEGORIES.add( "08" ); + } + + public SQLStateConverter(ViolatedConstraintNameExtracter extracter) { + this.extracter = extracter; + } + + /** + * Convert the given SQLException into Hibernate's JDBCException hierarchy. + * + * @param sqlException The SQLException to be converted. + * @param message An optional error message. + * @param sql Optionally, the sql being performed when the exception occurred. + * @return The resulting JDBCException. + */ + public JDBCException convert(SQLException sqlException, String message, String sql) { + String sqlState = JDBCExceptionHelper.extractSqlState( sqlException ); + + if ( sqlState != null ) { + String sqlStateClassCode = JDBCExceptionHelper.determineSqlStateClassCode( sqlState ); + + if ( sqlStateClassCode != null ) { + if ( SQL_GRAMMAR_CATEGORIES.contains( sqlStateClassCode ) ) { + return new SQLGrammarException( message, sqlException, sql ); + } + else if ( INTEGRITY_VIOLATION_CATEGORIES.contains( sqlStateClassCode ) ) { + String constraintName = extracter.extractConstraintName( sqlException ); + return new ConstraintViolationException( message, sqlException, sql, constraintName ); + } + else if ( CONNECTION_CATEGORIES.contains( sqlStateClassCode ) ) { + return new JDBCConnectionException( message, sqlException, sql ); + } + else if ( DATA_CATEGORIES.contains( sqlStateClassCode ) ) { + return new DataException( message, sqlException, sql ); + } + } + + if ( "40001".equals( sqlState ) ) { + return new LockAcquisitionException( message, sqlException, sql ); + } + + if ( "61000".equals( sqlState ) ) { + // oracle sql-state code for deadlock + return new LockAcquisitionException( message, sqlException, sql ); + } + } + + return handledNonSpecificException( sqlException, message, sql ); + } + + /** + * Handle an exception not converted to a specific type based on the SQLState. + * + * @param sqlException The exception to be handled. + * @param message An optional message + * @param sql Optionally, the sql being performed when the exception occurred. + * @return The converted exception; should never be null. 
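A hypothetical end-to-end sketch: SQLState "23505" carries class code "23", one of the INTEGRITY_VIOLATION_CATEGORIES, so convert yields a ConstraintViolationException (the extracter is a stub here):

    import java.sql.SQLException;
    import org.hibernate.JDBCException;
    import org.hibernate.exception.SQLStateConverter;
    import org.hibernate.exception.ViolatedConstraintNameExtracter;

    public class SqlStateConverterSketch {
        public static void main(String[] args) {
            ViolatedConstraintNameExtracter stub = new ViolatedConstraintNameExtracter() {
                public String extractConstraintName(SQLException sqle) {
                    return null; // no vendor-specific parsing in this sketch
                }
            };
            JDBCException converted = new SQLStateConverter( stub ).convert(
                    new SQLException( "duplicate key", "23505" ),
                    "could not insert", "insert into T values (...)" );
            System.out.println( converted.getClass().getName() ); // expected: ...ConstraintViolationException
        }
    }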
+ */ + protected JDBCException handledNonSpecificException(SQLException sqlException, String message, String sql) { + return new GenericJDBCException( message, sqlException, sql ); + } +} diff --git a/src/org/hibernate/exception/TemplatedViolatedConstraintNameExtracter.java b/src/org/hibernate/exception/TemplatedViolatedConstraintNameExtracter.java new file mode 100644 index 0000000000..07d5936557 --- /dev/null +++ b/src/org/hibernate/exception/TemplatedViolatedConstraintNameExtracter.java @@ -0,0 +1,37 @@ +// $Id$ +package org.hibernate.exception; + + + +/** + * Knows how to extract a violated constraint name from an error message based on the + * fact that the constraint name is templated within the message. + * + * @author Steve Ebersole + */ +public abstract class TemplatedViolatedConstraintNameExtracter implements ViolatedConstraintNameExtracter { + + /** + * Extracts the constraint name based on a template (i.e., the constraint name + * appears between templateStart and templateEnd within the message). + * + * @param templateStart The pattern denoting the start of the constraint name within the message. + * @param templateEnd The pattern denoting the end of the constraint name within the message. + * @param message The templated error message containing the constraint name. + * @return The found constraint name, or null. + */ + protected String extractUsingTemplate(String templateStart, String templateEnd, String message) { + int templateStartPosition = message.indexOf( templateStart ); + if ( templateStartPosition < 0 ) { + return null; + } + + int start = templateStartPosition + templateStart.length(); + int end = message.indexOf( templateEnd, start ); + if ( end < 0 ) { + end = message.length(); + } + + return message.substring( start, end ); + } + +} diff --git a/src/org/hibernate/exception/ViolatedConstraintNameExtracter.java b/src/org/hibernate/exception/ViolatedConstraintNameExtracter.java new file mode 100644 index 0000000000..81a546ba88 --- /dev/null +++ b/src/org/hibernate/exception/ViolatedConstraintNameExtracter.java @@ -0,0 +1,20 @@ +// $Id$ +package org.hibernate.exception; + +import java.sql.SQLException; + +/** + * Defines a contract for implementations that can extract the name of a violated + * constraint from a SQLException that is the result of that constraint violation. + * + * @author Steve Ebersole + */ +public interface ViolatedConstraintNameExtracter { + /** + * Extract the name of the violated constraint from the given SQLException. + * + * @param sqle The exception that was the result of the constraint violation. + * @return The extracted constraint name. + */ + public String extractConstraintName(SQLException sqle); +} diff --git a/src/org/hibernate/exception/package.html b/src/org/hibernate/exception/package.html new file mode 100755 index 0000000000..88654ef7d5 --- /dev/null +++ b/src/org/hibernate/exception/package.html @@ -0,0 +1,8 @@ + + + +
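A hypothetical subclass showing the template mechanics of extractUsingTemplate (the vendor message format is invented):

    import java.sql.SQLException;
    import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter;

    public class TemplateExtractionSketch {
        public static void main(String[] args) {
            TemplatedViolatedConstraintNameExtracter extracter = new TemplatedViolatedConstraintNameExtracter() {
                public String extractConstraintName(SQLException sqle) {
                    // name sits between "constraint [" and "]" in this made-up format
                    return extractUsingTemplate( "constraint [", "]", sqle.getMessage() );
                }
            };
            SQLException sqle = new SQLException( "ERROR: violated constraint [UK_USER_EMAIL]" );
            System.out.println( extracter.extractConstraintName( sqle ) ); // expected: UK_USER_EMAIL
        }
    }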

    + This package is a fork of Apache commons-lang nestable exceptions. +

+ + diff --git a/src/org/hibernate/hibernate-configuration-3.0.dtd b/src/org/hibernate/hibernate-configuration-3.0.dtd new file mode 100644 index 0000000000..a22cd7b50e --- /dev/null +++ b/src/org/hibernate/hibernate-configuration-3.0.dtd @@ -0,0 +1,52 @@ [DTD markup not recoverable from this extraction] diff --git a/src/org/hibernate/hibernate-mapping-3.0.dtd b/src/org/hibernate/hibernate-mapping-3.0.dtd new file mode 100644 index 0000000000..de7bf3d1ff --- /dev/null +++ b/src/org/hibernate/hibernate-mapping-3.0.dtd @@ -0,0 +1,1036 @@ [DTD markup not recoverable from this extraction] diff --git a/src/org/hibernate/hql/CollectionProperties.java b/src/org/hibernate/hql/CollectionProperties.java new file mode 100644 index 0000000000..2ec0cfb86d --- /dev/null +++ b/src/org/hibernate/hql/CollectionProperties.java @@ -0,0 +1,53 @@ +// $Id$ +package org.hibernate.hql; + +import org.hibernate.persister.collection.CollectionPropertyNames; + +import java.util.HashMap; +import java.util.Map; + +/** + * Provides a map of collection function names to the corresponding property names.
+ * + * @author josh Aug 16, 2004 7:51:45 PM + */ +public final class CollectionProperties { + public static final Map HQL_COLLECTION_PROPERTIES; + + private static final String COLLECTION_INDEX_LOWER = CollectionPropertyNames.COLLECTION_INDEX.toLowerCase(); + + static { + HQL_COLLECTION_PROPERTIES = new HashMap(); + HQL_COLLECTION_PROPERTIES.put( CollectionPropertyNames.COLLECTION_ELEMENTS.toLowerCase(), CollectionPropertyNames.COLLECTION_ELEMENTS ); + HQL_COLLECTION_PROPERTIES.put( CollectionPropertyNames.COLLECTION_INDICES.toLowerCase(), CollectionPropertyNames.COLLECTION_INDICES ); + HQL_COLLECTION_PROPERTIES.put( CollectionPropertyNames.COLLECTION_SIZE.toLowerCase(), CollectionPropertyNames.COLLECTION_SIZE ); + HQL_COLLECTION_PROPERTIES.put( CollectionPropertyNames.COLLECTION_MAX_INDEX.toLowerCase(), CollectionPropertyNames.COLLECTION_MAX_INDEX ); + HQL_COLLECTION_PROPERTIES.put( CollectionPropertyNames.COLLECTION_MIN_INDEX.toLowerCase(), CollectionPropertyNames.COLLECTION_MIN_INDEX ); + HQL_COLLECTION_PROPERTIES.put( CollectionPropertyNames.COLLECTION_MAX_ELEMENT.toLowerCase(), CollectionPropertyNames.COLLECTION_MAX_ELEMENT ); + HQL_COLLECTION_PROPERTIES.put( CollectionPropertyNames.COLLECTION_MIN_ELEMENT.toLowerCase(), CollectionPropertyNames.COLLECTION_MIN_ELEMENT ); + HQL_COLLECTION_PROPERTIES.put( COLLECTION_INDEX_LOWER, CollectionPropertyNames.COLLECTION_INDEX ); + } + + private CollectionProperties() { + } + + public static boolean isCollectionProperty(String name) { + String key = name.toLowerCase(); + // CollectionPropertyMapping processes everything except 'index'. + if ( COLLECTION_INDEX_LOWER.equals( key ) ) { + return false; + } + else { + return HQL_COLLECTION_PROPERTIES.containsKey( key ); + } + } + + public static String getNormalizedPropertyName(String name) { + return ( String ) HQL_COLLECTION_PROPERTIES.get( name ); + } + + public static boolean isAnyCollectionProperty(String name) { + String key = name.toLowerCase(); + return HQL_COLLECTION_PROPERTIES.containsKey( key ); + } +} diff --git a/src/org/hibernate/hql/CollectionSubqueryFactory.java b/src/org/hibernate/hql/CollectionSubqueryFactory.java new file mode 100644 index 0000000000..fc7dd1788a --- /dev/null +++ b/src/org/hibernate/hql/CollectionSubqueryFactory.java @@ -0,0 +1,44 @@ +// $Id$ +package org.hibernate.hql; + +import org.hibernate.engine.JoinSequence; +import org.hibernate.sql.JoinFragment; +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.util.StringHelper; + +import java.util.Map; + +/** + * Provides the SQL for collection subqueries. + *
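+ * For example (with illustrative table/column names), the generated fragment has the shape
+ * "select key_col from collection_table where owner_fk = owner_alias.id", with the from and
+ * where parts rendered from the JoinSequence's JoinFragment.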
    + * Moved here from PathExpressionParser to make it re-useable. + * + * @author josh + */ +public final class CollectionSubqueryFactory { + + //TODO: refactor to .sql package + + private CollectionSubqueryFactory() { + } + + public static String createCollectionSubquery( + JoinSequence joinSequence, + Map enabledFilters, + String[] columns) { + try { + JoinFragment join = joinSequence.toJoinFragment( enabledFilters, true ); + return new StringBuffer( "select " ) + .append( StringHelper.join( ", ", columns ) ) + .append( " from " ) + .append( join.toFromFragmentString().substring( 2 ) )// remove initial ", " + .append( " where " ) + .append( join.toWhereFragmentString().substring( 5 ) )// remove initial " and " + .toString(); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + } +} diff --git a/src/org/hibernate/hql/FilterTranslator.java b/src/org/hibernate/hql/FilterTranslator.java new file mode 100644 index 0000000000..11b92a018a --- /dev/null +++ b/src/org/hibernate/hql/FilterTranslator.java @@ -0,0 +1,28 @@ +// $Id$ +package org.hibernate.hql; + +import org.hibernate.MappingException; +import org.hibernate.QueryException; + +import java.util.Map; + + +/** + * Specialized interface for filters. + * + * @author josh Mar 14, 2004 11:33:35 AM + */ +public interface FilterTranslator extends QueryTranslator { + /** + * Compile a filter. This method may be called multiple + * times. Subsequent invocations are no-ops. + * + * @param collectionRole the role name of the collection used as the basis for the filter. + * @param replacements Defined query substitutions. + * @param shallow Does this represent a shallow (scalar or entity-id) select? + * @throws QueryException There was a problem parsing the query string. + * @throws MappingException There was a problem querying defined mappings. 
+ */ + void compile(String collectionRole, Map replacements, boolean shallow) + throws QueryException, MappingException; +} diff --git a/src/org/hibernate/hql/HolderInstantiator.java b/src/org/hibernate/hql/HolderInstantiator.java new file mode 100755 index 0000000000..d9210e3483 --- /dev/null +++ b/src/org/hibernate/hql/HolderInstantiator.java @@ -0,0 +1,81 @@ +//$Id$ +package org.hibernate.hql; + +import java.lang.reflect.Constructor; + +import org.hibernate.transform.AliasToBeanConstructorResultTransformer; +import org.hibernate.transform.ResultTransformer; +import org.hibernate.transform.Transformers; + +/** + * @author Gavin King + */ +public final class HolderInstantiator { + + public static final HolderInstantiator NOOP_INSTANTIATOR = new HolderInstantiator(null,null); + + private final ResultTransformer transformer; + private final String[] queryReturnAliases; + + public static HolderInstantiator getHolderInstantiator(ResultTransformer selectNewTransformer, ResultTransformer customTransformer, String[] queryReturnAliases) { + if(selectNewTransformer!=null) { + return new HolderInstantiator(selectNewTransformer, queryReturnAliases); + } else { + return new HolderInstantiator(customTransformer, queryReturnAliases); + } + } + + public static ResultTransformer createSelectNewTransformer(Constructor constructor, boolean returnMaps, boolean returnLists) { + if ( constructor != null ) { + return new AliasToBeanConstructorResultTransformer(constructor); + } + else if ( returnMaps ) { + return Transformers.ALIAS_TO_ENTITY_MAP; + } + else if ( returnLists ) { + return Transformers.TO_LIST; + } + else { + return null; + } + } + + static public HolderInstantiator createClassicHolderInstantiator(Constructor constructor, + ResultTransformer transformer) { + if ( constructor != null ) { + return new HolderInstantiator(new AliasToBeanConstructorResultTransformer(constructor), null); + } + else { + return new HolderInstantiator(transformer, null); + } + } + + public HolderInstantiator( + ResultTransformer transformer, + String[] queryReturnAliases + ) { + this.transformer = transformer; + this.queryReturnAliases = queryReturnAliases; + } + + public boolean isRequired() { + return transformer!=null; + } + + public Object instantiate(Object[] row) { + if(transformer==null) { + return row; + } else { + return transformer.transformTuple(row, queryReturnAliases); + } + } + + public String[] getQueryReturnAliases() { + return queryReturnAliases; + } + + public ResultTransformer getResultTransformer() { + return transformer; + } + +} diff --git a/src/org/hibernate/hql/NameGenerator.java b/src/org/hibernate/hql/NameGenerator.java new file mode 100644 index 0000000000..57f723da99 --- /dev/null +++ b/src/org/hibernate/hql/NameGenerator.java @@ -0,0 +1,41 @@ +// $Id$ +package org.hibernate.hql; + +import org.hibernate.MappingException; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.type.Type; + +/** + * Provides utility methods for generating HQL / SQL names. Shared by both the 'classic' and 'new' query translators. + * + * @author josh Mar 18, 2004 7:17:25 AM + */ +public final class NameGenerator { + /** + * Private empty constructor (checkstyle says utility classes should not have default constructors). 
+ */ + private NameGenerator() { + } + + public static String[][] generateColumnNames(Type[] types, SessionFactoryImplementor f) throws MappingException { + String[][] columnNames = new String[types.length][]; + for ( int i = 0; i < types.length; i++ ) { + int span = types[i].getColumnSpan( f ); + columnNames[i] = new String[span]; + for ( int j = 0; j < span; j++ ) { + columnNames[i][j] = NameGenerator.scalarName( i, j ); + } + } + return columnNames; + } + + public static String scalarName(int x, int y) { + return new StringBuffer() + .append( "col_" ) + .append( x ) + .append( '_' ) + .append( y ) + .append( '_' ) + .toString(); + } +} diff --git a/src/org/hibernate/hql/ParameterTranslations.java b/src/org/hibernate/hql/ParameterTranslations.java new file mode 100644 index 0000000000..9372efe92c --- /dev/null +++ b/src/org/hibernate/hql/ParameterTranslations.java @@ -0,0 +1,27 @@ +package org.hibernate.hql; + +import org.hibernate.type.Type; +import java.util.Set; + +/** + * Defines available information about the parameters encountered during + * query translation. + * + * @author Steve Ebersole + */ +public interface ParameterTranslations { + + public boolean supportsOrdinalParameterMetadata(); + + public int getOrdinalParameterCount(); + + public int getOrdinalParameterSqlLocation(int ordinalPosition); + + public Type getOrdinalParameterExpectedType(int ordinalPosition); + + public Set getNamedParameterNames(); + + public int[] getNamedParameterSqlLocations(String name); + + public Type getNamedParameterExpectedType(String name); +} diff --git a/src/org/hibernate/hql/QueryExecutionRequestException.java b/src/org/hibernate/hql/QueryExecutionRequestException.java new file mode 100644 index 0000000000..dfa5112e33 --- /dev/null +++ b/src/org/hibernate/hql/QueryExecutionRequestException.java @@ -0,0 +1,16 @@ +//$Id: $ +package org.hibernate.hql; + +import org.hibernate.QueryException; + +/** + * Expecting to execute an illegal operation regarding the query type + * + * @author Emmanuel Bernard + */ +public class QueryExecutionRequestException extends QueryException { + + public QueryExecutionRequestException(String message, String queryString) { + super( message, queryString ); + } +} diff --git a/src/org/hibernate/hql/QuerySplitter.java b/src/org/hibernate/hql/QuerySplitter.java new file mode 100644 index 0000000000..7729f17c51 --- /dev/null +++ b/src/org/hibernate/hql/QuerySplitter.java @@ -0,0 +1,134 @@ +//$Id$ +package org.hibernate.hql; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.MappingException; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.hql.classic.ParserHelper; +import org.hibernate.util.StringHelper; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Set; + +/** + * Provides query splitting methods, which were originally in QueryTranslator. + *
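+ * For example (illustrative), with implicit polymorphism a query such as
+ * "from java.io.Serializable s" is rewritten into one concrete query per mapped
+ * implementor reported by SessionFactoryImplementor.getImplementors().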
    + * TODO: This will need to be refactored at some point. + * + * @author josh Mar 14, 2004 10:50:23 AM + */ +public final class QuerySplitter { + + private static final Log log = LogFactory.getLog( QuerySplitter.class ); + + private static final Set BEFORE_CLASS_TOKENS = new HashSet(); + private static final Set NOT_AFTER_CLASS_TOKENS = new HashSet(); + + static { + BEFORE_CLASS_TOKENS.add( "from" ); + BEFORE_CLASS_TOKENS.add( "delete" ); + BEFORE_CLASS_TOKENS.add( "update" ); + //beforeClassTokens.add("new"); DEFINITELY DON'T HAVE THIS!! + BEFORE_CLASS_TOKENS.add( "," ); + NOT_AFTER_CLASS_TOKENS.add( "in" ); + //notAfterClassTokens.add(","); + NOT_AFTER_CLASS_TOKENS.add( "from" ); + NOT_AFTER_CLASS_TOKENS.add( ")" ); + } + + /** + * Private empty constructor. + * (or else checkstyle says: 'warning: Utility classes should not have a public or default constructor.') + */ + private QuerySplitter() { + } + + /** + * Handle Hibernate "implicit" polymorphism, by translating the query string into + * several "concrete" queries against mapped classes. + */ + public static String[] concreteQueries(String query, SessionFactoryImplementor factory) throws MappingException { + + //scan the query string for class names appearing in the from clause and replace + //with all persistent implementors of the class/interface, returning multiple + //query strings (make sure we don't pick up a class in the select clause!) + + //TODO: this is one of the ugliest and most fragile pieces of code in Hibernate.... + + String[] tokens = StringHelper.split( StringHelper.WHITESPACE + "(),", query, true ); + if ( tokens.length == 0 ) return new String[]{query}; // just especially for the trivial collection filter + ArrayList placeholders = new ArrayList(); + ArrayList replacements = new ArrayList(); + StringBuffer templateQuery = new StringBuffer( 40 ); + int count = 0; + String last = null; + int nextIndex = 0; + String next = null; + boolean isSelectClause = false; + + templateQuery.append( tokens[0] ); + if ( "select".equals( tokens[0].toLowerCase() ) ) isSelectClause = true; + + for ( int i = 1; i < tokens.length; i++ ) { + + //update last non-whitespace token, if necessary + if ( !ParserHelper.isWhitespace( tokens[i - 1] ) ) last = tokens[i - 1].toLowerCase(); + + // select-range is terminated by declaration of "from" + if ( "from".equals( tokens[i].toLowerCase() ) ) isSelectClause = false; + + String token = tokens[i]; + if ( !ParserHelper.isWhitespace( token ) || last == null ) { + + //scan for next non-whitespace token + if ( nextIndex <= i ) { + for ( nextIndex = i + 1; nextIndex < tokens.length; nextIndex++ ) { + next = tokens[nextIndex].toLowerCase(); + if ( !ParserHelper.isWhitespace( next ) ) break; + } + } + + boolean process = !isSelectClause && + isJavaIdentifier( token ) && + isPossiblyClassName( last, next ); + + if (process) { + String importedClassName = getImportedClass( token, factory ); + if ( importedClassName != null ) { + String[] implementors = factory.getImplementors( importedClassName ); + String placeholder = "$clazz" + count++ + "$"; + if ( implementors != null ) { + placeholders.add( placeholder ); + replacements.add( implementors ); + } + token = placeholder; // Note this!! 
+ }
+ }
+
+ }
+
+ templateQuery.append( token );
+
+ }
+ String[] results = StringHelper.multiply( templateQuery.toString(), placeholders.iterator(), replacements.iterator() );
+ if ( results.length == 0 ) log.warn( "no persistent classes found for query class: " + query );
+ return results;
+ }
+
+ private static boolean isPossiblyClassName(String last, String next) {
+ return "class".equals( last ) || (
+ BEFORE_CLASS_TOKENS.contains( last ) &&
+ !NOT_AFTER_CLASS_TOKENS.contains( next )
+ );
+ }
+
+ private static boolean isJavaIdentifier(String token) {
+ return Character.isJavaIdentifierStart( token.charAt( 0 ) );
+ }
+
+ public static String getImportedClass(String name, SessionFactoryImplementor factory) {
+ return factory.getImportedClassName( name );
+ }
+}
diff --git a/src/org/hibernate/hql/QueryTranslator.java b/src/org/hibernate/hql/QueryTranslator.java
new file mode 100644
index 0000000000..1d8873d822
--- /dev/null
+++ b/src/org/hibernate/hql/QueryTranslator.java
@@ -0,0 +1,167 @@
+//$Id$
+package org.hibernate.hql;
+
+import org.hibernate.HibernateException;
+import org.hibernate.MappingException;
+import org.hibernate.QueryException;
+import org.hibernate.ScrollableResults;
+import org.hibernate.engine.QueryParameters;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.event.EventSource;
+import org.hibernate.type.Type;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Defines the contract of an HQL->SQL translator.
+ *
+ * @author josh
+ */
+public interface QueryTranslator {
+
+ // Error message constants.
+ public static final String ERROR_CANNOT_FETCH_WITH_ITERATE = "fetch may not be used with scroll() or iterate()";
+ public static final String ERROR_NAMED_PARAMETER_DOES_NOT_APPEAR = "Named parameter does not appear in Query: ";
+ public static final String ERROR_CANNOT_DETERMINE_TYPE = "Could not determine type of: ";
+ public static final String ERROR_CANNOT_FORMAT_LITERAL = "Could not format constant value to SQL literal: ";
+
+ /**
+ * Compile a "normal" query. This method may be called multiple
+ * times. Subsequent invocations are no-ops.
+ *
+ * @param replacements Defined query substitutions.
+ * @param shallow Does this represent a shallow (scalar or entity-id) select?
+ * @throws QueryException There was a problem parsing the query string.
+ * @throws MappingException There was a problem querying defined mappings.
+ */
+ void compile(Map replacements, boolean shallow) throws QueryException, MappingException;
+
+ /**
+ * Perform a list operation given the underlying query definition.
+ *
+ * @param session The session owning this query.
+ * @param queryParameters The query bind parameters.
+ * @return The query list results.
+ * @throws HibernateException
+ */
+ List list(SessionImplementor session, QueryParameters queryParameters)
+ throws HibernateException;
+
+ /**
+ * Perform an iterate operation given the underlying query definition.
+ *
+ * @param queryParameters The query bind parameters.
+ * @param session The session owning this query.
+ * @return An iterator over the query results.
+ * @throws HibernateException
+ */
+ Iterator iterate(QueryParameters queryParameters, EventSource session)
+ throws HibernateException;
+
+ /**
+ * Perform a scroll operation given the underlying query definition.
+ *
+ * @param queryParameters The query bind parameters.
+ * @param session The session owning this query.
+ * @return The ScrollableResults wrapper around the query results.
+ * @throws HibernateException
+ */
+ ScrollableResults scroll(QueryParameters queryParameters, SessionImplementor session)
+ throws HibernateException;
+
+ /**
+ * Perform a bulk update/delete operation given the underlying query definition.
+ *
+ * @param queryParameters The query bind parameters.
+ * @param session The session owning this query.
+ * @return The number of entities updated or deleted.
+ * @throws HibernateException
+ */
+ int executeUpdate(QueryParameters queryParameters, SessionImplementor session)
+ throws HibernateException;
+
+ /**
+ * Returns the set of query spaces (table names) that the query refers to.
+ *
+ * @return A set of query spaces (table names).
+ */
+ Set getQuerySpaces();
+
+ /**
+ * Retrieve the query identifier for this translator. The query identifier is
+ * used in stats collection.
+ *
+ * @return the identifier
+ */
+ String getQueryIdentifier();
+
+ /**
+ * Returns the SQL string generated by the translator.
+ *
+ * @return the SQL string generated by the translator.
+ */
+ String getSQLString();
+
+ List collectSqlStrings();
+
+ /**
+ * Returns the HQL string processed by the translator.
+ *
+ * @return the HQL string processed by the translator.
+ */
+ String getQueryString();
+
+ /**
+ * Returns the filters enabled for this query translator.
+ *
+ * @return Filters enabled for this query execution.
+ */
+ Map getEnabledFilters();
+
+ /**
+ * Returns an array of Types represented in the query result.
+ *
+ * @return Query return types.
+ */
+ Type[] getReturnTypes();
+
+ /**
+ * Returns an array of HQL aliases.
+ */
+ String[] getReturnAliases();
+
+ /**
+ * Returns the column names in the generated SQL.
+ *
+ * @return the column names in the generated SQL.
+ */
+ String[][] getColumnNames();
+
+ /**
+ * Return information about any parameters encountered during
+ * translation.
+ *
+ * @return The parameter information.
+ */
+ ParameterTranslations getParameterTranslations();
+
+ /**
+ * Validate the scrollability of the translated query.
+ *
+ * @throws HibernateException
+ */
+ void validateScrollability() throws HibernateException;
+
+ /**
+ * Does the translated query contain collection fetches?
+ *
+ * @return true if the query contains collection fetches;
+ * false otherwise.
+ */
+ boolean containsCollectionFetches();
+
+ boolean isManipulationStatement();
+}
diff --git a/src/org/hibernate/hql/QueryTranslatorFactory.java b/src/org/hibernate/hql/QueryTranslatorFactory.java
new file mode 100644
index 0000000000..6828abc979
--- /dev/null
+++ b/src/org/hibernate/hql/QueryTranslatorFactory.java
@@ -0,0 +1,37 @@
+//$Id$
+package org.hibernate.hql;
+
+import org.hibernate.engine.SessionFactoryImplementor;
+
+import java.util.Map;
+
+/**
+ * Facade for generation of {@link QueryTranslator} and {@link FilterTranslator} instances.
+ *
+ * @author Gavin King
+ */
+public interface QueryTranslatorFactory {
+ /**
+ * Construct a {@link QueryTranslator} instance capable of translating
+ * an HQL query string.
+ *
+ * @param queryIdentifier The query-identifier (used in
+ * {@link org.hibernate.stat.QueryStatistics} collection). This is
+ * typically the same as the queryString parameter except for the case of
+ * split polymorphic queries which result in multiple physical sql
+ * queries.
+ * @param queryString The query string to be translated
+ * @param filters Currently enabled filters
+ * @param factory The session factory.
+ * @return an appropriate translator.
+ */ + public QueryTranslator createQueryTranslator(String queryIdentifier, String queryString, Map filters, SessionFactoryImplementor factory); + + /** + * Construct a {@link FilterTranslator} instance capable of translating + * an HQL filter string. + * + * @see #createQueryTranslator + */ + public FilterTranslator createFilterTranslator(String queryIdentifier, String queryString, Map filters, SessionFactoryImplementor factory); +} diff --git a/src/org/hibernate/hql/antlr/package.html b/src/org/hibernate/hql/antlr/package.html new file mode 100644 index 0000000000..a6a1dda6e9 --- /dev/null +++ b/src/org/hibernate/hql/antlr/package.html @@ -0,0 +1,8 @@ + + + +

    A special package for ANTLR-generated parser classes.

    +

NOTE: The classes in this package are generated from the ANTLR grammar files;
+do not check them into version control.

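    For reference, a translator built on these generated parsers is normally selected
    through configuration rather than constructed directly. A minimal sketch, assuming
    the standard Hibernate 3 property name:

        // org.hibernate.cfg.Configuration
        Configuration cfg = new Configuration();
        cfg.setProperty( "hibernate.query.factory_class",
                "org.hibernate.hql.ast.ASTQueryTranslatorFactory" );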
+
+
diff --git a/src/org/hibernate/hql/ast/ASTQueryTranslatorFactory.java b/src/org/hibernate/hql/ast/ASTQueryTranslatorFactory.java
new file mode 100755
index 0000000000..3b5f1852dd
--- /dev/null
+++ b/src/org/hibernate/hql/ast/ASTQueryTranslatorFactory.java
@@ -0,0 +1,49 @@
+//$Id$
+package org.hibernate.hql.ast;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.hql.FilterTranslator;
+import org.hibernate.hql.QueryTranslator;
+import org.hibernate.hql.QueryTranslatorFactory;
+
+import java.util.Map;
+
+/**
+ * Generates translators which use the ANTLR-based parser to perform
+ * the translation.
+ *
+ * @author Gavin King
+ */
+public class ASTQueryTranslatorFactory implements QueryTranslatorFactory {
+
+ private static final Log log = LogFactory.getLog( ASTQueryTranslatorFactory.class );
+
+ public ASTQueryTranslatorFactory() {
+ log.info( "Using ASTQueryTranslatorFactory" );
+ }
+
+ /**
+ * @see QueryTranslatorFactory#createQueryTranslator
+ */
+ public QueryTranslator createQueryTranslator(
+ String queryIdentifier,
+ String queryString,
+ Map filters,
+ SessionFactoryImplementor factory) {
+ return new QueryTranslatorImpl( queryIdentifier, queryString, filters, factory );
+ }
+
+ /**
+ * @see QueryTranslatorFactory#createFilterTranslator
+ */
+ public FilterTranslator createFilterTranslator(
+ String queryIdentifier,
+ String queryString,
+ Map filters,
+ SessionFactoryImplementor factory) {
+ return new QueryTranslatorImpl( queryIdentifier, queryString, filters, factory );
+ }
+
+}
diff --git a/src/org/hibernate/hql/ast/DetailedSemanticException.java b/src/org/hibernate/hql/ast/DetailedSemanticException.java
new file mode 100644
index 0000000000..ebf8ce2359
--- /dev/null
+++ b/src/org/hibernate/hql/ast/DetailedSemanticException.java
@@ -0,0 +1,77 @@
+// $Id$
+package org.hibernate.hql.ast;
+
+import antlr.SemanticException;
+
+import java.io.PrintStream;
+import java.io.PrintWriter;
+
+/**
+ * Thrown when a call to the underlying Hibernate engine fails, indicating
+ * some form of semantic exception (e.g. a class name was not found in the
+ * current mappings, etc.).
+ */
+public class DetailedSemanticException extends SemanticException {
+ private Throwable cause;
+ private boolean showCauseMessage = true;
+
+ public DetailedSemanticException(String message) {
+ super( message );
+ }
+
+ public DetailedSemanticException(String s, Throwable e) {
+ super( s );
+ cause = e;
+ }
+
+ /**
+ * Converts everything to a string.
+ *
+ * @return a string.
+ */
+ public String toString() {
+ if ( cause == null || ( !showCauseMessage ) ) {
+ return super.toString();
+ }
+ else {
+ return super.toString() + "\n[cause=" + cause.toString() + "]";
+ }
+ }
+
+ /**
+ * Prints a stack trace.
+ */
+ public void printStackTrace() {
+ super.printStackTrace();
+ if ( cause != null ) {
+ cause.printStackTrace();
+ }
+ }
+
+ /**
+ * Prints a stack trace to the specified print stream.
+ *
+ * @param s the print stream.
+ */
+ public void printStackTrace(PrintStream s) {
+ super.printStackTrace( s );
+ if ( cause != null ) {
+ s.println( "Cause:" );
+ cause.printStackTrace( s );
+ }
+ }
+
+ /**
+ * Prints this throwable and its backtrace to the specified print writer.
+ *
+ * @param w the print writer.
+ */
+ public void printStackTrace(PrintWriter w) {
+ super.printStackTrace( w );
+ if ( cause != null ) {
+ w.println( "Cause:" );
+ cause.printStackTrace( w );
+ }
+ }
+
+}
diff --git a/src/org/hibernate/hql/ast/ErrorCounter.java b/src/org/hibernate/hql/ast/ErrorCounter.java
new file mode 100644
index 0000000000..32a7532f27
--- /dev/null
+++ b/src/org/hibernate/hql/ast/ErrorCounter.java
@@ -0,0 +1,72 @@
+// $Id$
+package org.hibernate.hql.ast;
+
+import antlr.RecognitionException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hibernate.QueryException;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * An error handler that counts parsing errors and warnings.
+ */
+public class ErrorCounter implements ParseErrorHandler {
+ private Log log = LogFactory.getLog( ErrorCounter.class );
+ private Log hqlLog = LogFactory.getLog( "org.hibernate.hql.PARSER" );
+
+ private List errorList = new ArrayList();
+ private List warningList = new ArrayList();
+ private List recognitionExceptions = new ArrayList();
+
+ public void reportError(RecognitionException e) {
+ reportError( e.toString() );
+ recognitionExceptions.add( e );
+ if ( log.isDebugEnabled() ) {
+ log.debug( e, e );
+ }
+ }
+
+ public void reportError(String message) {
+ hqlLog.error( message );
+ errorList.add( message );
+ }
+
+ public int getErrorCount() {
+ return errorList.size();
+ }
+
+ public void reportWarning(String message) {
+ hqlLog.debug( message );
+ warningList.add( message );
+ }
+
+ private String getErrorString() {
+ StringBuffer buf = new StringBuffer();
+ for ( Iterator iterator = errorList.iterator(); iterator.hasNext(); ) {
+ buf.append( ( String ) iterator.next() );
+ if ( iterator.hasNext() ) buf.append( "\n" );
+
+ }
+ return buf.toString();
+ }
+
+ public void throwQueryException() throws QueryException {
+ if ( getErrorCount() > 0 ) {
+ if ( recognitionExceptions.size() > 0 ) {
+ throw QuerySyntaxException.convert( ( RecognitionException ) recognitionExceptions.get( 0 ) );
+ }
+ else {
+ throw new QueryException( getErrorString() );
+ }
+ }
+ else {
+ // all clear
+ if ( log.isDebugEnabled() ) {
+ log.debug( "throwQueryException() : no errors" );
+ }
+ }
+ }
+}
diff --git a/src/org/hibernate/hql/ast/ErrorReporter.java b/src/org/hibernate/hql/ast/ErrorReporter.java
new file mode 100644
index 0000000000..b1b33d0091
--- /dev/null
+++ b/src/org/hibernate/hql/ast/ErrorReporter.java
@@ -0,0 +1,17 @@
+// $Id$
+package org.hibernate.hql.ast;
+
+import antlr.RecognitionException;
+
+/**
+ * Implementations will report or handle errors raised by an ANTLR base parser.
+ *
+ * @author josh Jun 27, 2004 9:49:55 PM
+ */
+public interface ErrorReporter {
+ void reportError(RecognitionException e);
+
+ void reportError(String s);
+
+ void reportWarning(String s);
+}
diff --git a/src/org/hibernate/hql/ast/HqlASTFactory.java b/src/org/hibernate/hql/ast/HqlASTFactory.java
new file mode 100644
index 0000000000..72cf433a44
--- /dev/null
+++ b/src/org/hibernate/hql/ast/HqlASTFactory.java
@@ -0,0 +1,22 @@
+package org.hibernate.hql.ast;
+
+import antlr.ASTFactory;
+import org.hibernate.hql.ast.tree.Node;
+
+/**
+ * User: Joshua Davis
    + * Date: Sep 23, 2005
    + * Time: 12:30:01 PM
+ */
+public class HqlASTFactory extends ASTFactory {
+
+ /**
+ * Returns the class for a given token type (a.k.a. AST node type).
+ *
+ * @param tokenType The token type.
+ * @return Class - The AST node class to instantiate.
+ */
+ public Class getASTNodeType(int tokenType) {
+ return Node.class;
+ }
+}
diff --git a/src/org/hibernate/hql/ast/HqlLexer.java b/src/org/hibernate/hql/ast/HqlLexer.java
new file mode 100644
index 0000000000..38f4968482
--- /dev/null
+++ b/src/org/hibernate/hql/ast/HqlLexer.java
@@ -0,0 +1,59 @@
+// $Id$
+package org.hibernate.hql.ast;
+
+import java.io.InputStream;
+import java.io.Reader;
+
+import antlr.Token;
+import org.hibernate.QueryException;
+import org.hibernate.hql.antlr.HqlBaseLexer;
+
+/**
+ * Custom lexer for the HQL grammar. Extends the base lexer generated by ANTLR
+ * in order to keep the grammar source file clean.
+ */
+class HqlLexer extends HqlBaseLexer {
+ /**
+ * Tracks whether the token currently being lexed could also be a valid identifier.
+ */
+ private boolean possibleID = false;
+
+ public HqlLexer(InputStream in) {
+ super( in );
+ }
+
+ public HqlLexer(Reader in) {
+ super(in);
+ }
+
+ public void setTokenObjectClass(String cl) {
+ // Ignore the token class name parameter, and use a specific token class.
+ super.setTokenObjectClass( HqlToken.class.getName() );
+ }
+
+ protected void setPossibleID(boolean possibleID) {
+ this.possibleID = possibleID;
+ }
+
+ protected Token makeToken(int i) {
+ HqlToken token = ( HqlToken ) super.makeToken( i );
+ token.setPossibleID( possibleID );
+ possibleID = false;
+ return token;
+ }
+
+ public int testLiteralsTable(int i) {
+ int ttype = super.testLiteralsTable( i );
+ return ttype;
+ }
+
+ public void panic() {
+ //overridden to avoid System.exit
+ panic("CharScanner: panic");
+ }
+
+ public void panic(String s) {
+ //overridden to avoid System.exit
+ throw new QueryException(s);
+ }
+}
diff --git a/src/org/hibernate/hql/ast/HqlParser.java b/src/org/hibernate/hql/ast/HqlParser.java
new file mode 100644
index 0000000000..e9f366efe6
--- /dev/null
+++ b/src/org/hibernate/hql/ast/HqlParser.java
@@ -0,0 +1,338 @@
+// $Id$
+package org.hibernate.hql.ast;
+
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.io.StringReader;
+
+import antlr.ASTPair;
+import antlr.MismatchedTokenException;
+import antlr.RecognitionException;
+import antlr.Token;
+import antlr.TokenStream;
+import antlr.TokenStreamException;
+import antlr.collections.AST;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hibernate.hql.antlr.HqlBaseParser;
+import org.hibernate.hql.antlr.HqlTokenTypes;
+import org.hibernate.hql.ast.util.ASTPrinter;
+import org.hibernate.hql.ast.util.ASTUtil;
+import org.hibernate.QueryException;
+
+/**
+ * Implements the semantic action methods defined in the HQL base parser to keep the grammar
+ * source file a little cleaner. Extends the parser class generated by ANTLR.
+ *
+ * @author Joshua Davis (pgmjsd@sourceforge.net)
+ */
+public final class HqlParser extends HqlBaseParser {
+ /**
+ * A logger for this class.
+ */
+ private static final Log log = LogFactory.getLog( HqlParser.class );
+
+ private ParseErrorHandler parseErrorHandler;
+ private ASTPrinter printer = getASTPrinter();
+
+ private static ASTPrinter getASTPrinter() {
+ return new ASTPrinter( org.hibernate.hql.antlr.HqlTokenTypes.class );
+ }
+
+ public static HqlParser getInstance(String hql) {
+ // [jsd] The fix for HHH-558...
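+ // Usage sketch (assumptions: 'statement' is the root grammar rule, and errors are
+ // surfaced afterwards via the ErrorCounter delegate's throwQueryException()):
+ //   HqlParser parser = HqlParser.getInstance( hql );
+ //   parser.statement();
+ //   parser.getParseErrorHandler().throwQueryException();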
+ HqlLexer lexer = new HqlLexer( new StringReader( hql ) );
+ return new HqlParser( lexer );
+ }
+
+ private HqlParser(TokenStream lexer) {
+ super( lexer );
+ initialize();
+ }
+
+ public void reportError(RecognitionException e) {
+ parseErrorHandler.reportError( e ); // Use the delegate.
+ }
+
+ public void reportError(String s) {
+ parseErrorHandler.reportError( s ); // Use the delegate.
+ }
+
+ public void reportWarning(String s) {
+ parseErrorHandler.reportWarning( s );
+ }
+
+ public ParseErrorHandler getParseErrorHandler() {
+ return parseErrorHandler;
+ }
+
+ /**
+ * Overrides the base behavior to retry keywords as identifiers.
+ *
+ * @param token The token.
+ * @param ex The recognition exception.
+ * @return AST - The new AST.
+ * @throws antlr.RecognitionException if the substitution was not possible.
+ * @throws antlr.TokenStreamException if the substitution was not possible.
+ */
+ public AST handleIdentifierError(Token token, RecognitionException ex) throws RecognitionException, TokenStreamException {
+ // If the token can tell us if it could be an identifier...
+ if ( token instanceof HqlToken ) {
+ HqlToken hqlToken = ( HqlToken ) token;
+ // ... and the token could be an identifier and the error is
+ // a mismatched token error ...
+ if ( hqlToken.isPossibleID() && ( ex instanceof MismatchedTokenException ) ) {
+ MismatchedTokenException mte = ( MismatchedTokenException ) ex;
+ // ... and the expected token type was an identifier, then:
+ if ( mte.expecting == HqlTokenTypes.IDENT ) {
+ // Use the token as an identifier.
+ reportWarning( "Keyword '"
+ + token.getText()
+ + "' is being interpreted as an identifier due to: " + mte.getMessage() );
+ // Add the token to the AST.
+ ASTPair currentAST = new ASTPair();
+ token.setType( HqlTokenTypes.WEIRD_IDENT );
+ astFactory.addASTChild( currentAST, astFactory.create( token ) );
+ consume();
+ AST identifierAST = currentAST.root;
+ return identifierAST;
+ }
+ } // if
+ } // if
+ // Otherwise, handle the error normally.
+ return super.handleIdentifierError( token, ex );
+ }
+
+ /**
+ * Returns an equivalent tree for (NOT (a relop b) ), for example:
    +	 * (NOT (GT a b) ) => (LE a b)
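+	 * (NOT (OR a b) ) => (AND (NOT a) (NOT b) )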
    +	 * 
    + * + * @param x The sub tree to transform, the parent is assumed to be NOT. + * @return AST - The equivalent sub-tree. + */ + public AST negateNode(AST x) { + //TODO: switch statements are always evil! We already had bugs because + // of forgotten token types. Use polymorphism for this! + switch ( x.getType() ) { + case OR: + x.setType(AND); + x.setText("{and}"); + negateNode( x.getFirstChild() ); + negateNode( x.getFirstChild().getNextSibling() ); + return x; + case AND: + x.setType(OR); + x.setText("{or}"); + negateNode( x.getFirstChild() ); + negateNode( x.getFirstChild().getNextSibling() ); + return x; + case EQ: + x.setType( NE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (EQ a b) ) => (NE a b) + case NE: + x.setType( EQ ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (NE a b) ) => (EQ a b) + case GT: + x.setType( LE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (GT a b) ) => (LE a b) + case LT: + x.setType( GE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (LT a b) ) => (GE a b) + case GE: + x.setType( LT ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (GE a b) ) => (LT a b) + case LE: + x.setType( GT ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (LE a b) ) => (GT a b) + case LIKE: + x.setType( NOT_LIKE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (LIKE a b) ) => (NOT_LIKE a b) + case NOT_LIKE: + x.setType( LIKE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (NOT_LIKE a b) ) => (LIKE a b) + case IN: + x.setType( NOT_IN ); + x.setText( "{not}" + x.getText() ); + return x; + case NOT_IN: + x.setType( IN ); + x.setText( "{not}" + x.getText() ); + return x; + case IS_NULL: + x.setType( IS_NOT_NULL ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (IS_NULL a b) ) => (IS_NOT_NULL a b) + case IS_NOT_NULL: + x.setType( IS_NULL ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (IS_NOT_NULL a b) ) => (IS_NULL a b) + case BETWEEN: + x.setType( NOT_BETWEEN ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (BETWEEN a b) ) => (NOT_BETWEEN a b) + case NOT_BETWEEN: + x.setType( BETWEEN ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (NOT_BETWEEN a b) ) => (BETWEEN a b) +/* This can never happen because this rule will always eliminate the child NOT. + case NOT: + return x.getFirstChild(); // (NOT (NOT x) ) => (x) +*/ + default: + return super.negateNode( x ); // Just add a 'not' parent. + } + } + + /** + * Post process equality expressions, clean up the subtree. + * + * @param x The equality expression. + * @return AST - The clean sub-tree. + */ + public AST processEqualityExpression(AST x) { + if ( x == null ) { + log.warn( "processEqualityExpression() : No expression to process!" 
);
+ return null;
+ }
+
+ int type = x.getType();
+ if ( type == EQ || type == NE ) {
+ boolean negated = type == NE;
+ if ( x.getNumberOfChildren() == 2 ) {
+ AST a = x.getFirstChild();
+ AST b = a.getNextSibling();
+ // (EQ NULL b) => (IS_NULL b)
+ if ( a.getType() == NULL && b.getType() != NULL ) {
+ return createIsNullParent( b, negated );
+ }
+ // (EQ a NULL) => (IS_NULL a)
+ else if ( b.getType() == NULL && a.getType() != NULL ) {
+ return createIsNullParent( a, negated );
+ }
+ else if ( b.getType() == EMPTY ) {
+ return processIsEmpty( a, negated );
+ }
+ else {
+ return x;
+ }
+ }
+ else {
+ return x;
+ }
+ }
+ else {
+ return x;
+ }
+ }
+
+ private AST createIsNullParent(AST node, boolean negated) {
+ node.setNextSibling( null );
+ int type = negated ? IS_NOT_NULL : IS_NULL;
+ String text = negated ? "is not null" : "is null";
+ return ASTUtil.createParent( astFactory, type, text, node );
+ }
+
+ private AST processIsEmpty(AST node, boolean negated) {
+ node.setNextSibling( null );
+ // NOTE: Because we're using ASTUtil.createParent(), the tree must be created from the bottom up.
+ // IS EMPTY x => (EXISTS (QUERY (SELECT_FROM (FROM x) ) ) )
+ AST ast = createSubquery( node );
+ ast = ASTUtil.createParent( astFactory, EXISTS, "exists", ast );
+ // 'exists' corresponds to IS NOT EMPTY, so wrap it in NOT unless the
+ // expression was already negated (i.e., unless it was IS NOT EMPTY).
+ if ( !negated ) {
+ ast = ASTUtil.createParent( astFactory, NOT, "not", ast );
+ }
+ return ast;
+ }
+
+ private AST createSubquery(AST node) {
+ AST ast = ASTUtil.createParent( astFactory, RANGE, "RANGE", node );
+ ast = ASTUtil.createParent( astFactory, FROM, "from", ast );
+ ast = ASTUtil.createParent( astFactory, SELECT_FROM, "SELECT_FROM", ast );
+ ast = ASTUtil.createParent( astFactory, QUERY, "QUERY", ast );
+ return ast;
+ }
+
+ public void showAst(AST ast, PrintStream out) {
+ showAst( ast, new PrintWriter( out ) );
+ }
+
+ private void showAst(AST ast, PrintWriter pw) {
+ printer.showAst( ast, pw );
+ }
+
+ private void initialize() {
+ // Initialize the error handling delegate.
+ parseErrorHandler = new ErrorCounter();
+ setASTFactory(new HqlASTFactory()); // Create nodes that track line and column number.
+ }
+
+ public void weakKeywords() throws TokenStreamException {
+
+ int t = LA( 1 );
+ switch ( t ) {
+ case ORDER:
+ case GROUP:
+ // Case 1: Multi token keywords GROUP BY and ORDER BY
+ // The next token ( LT(2) ) should be 'by'... otherwise, this is just an ident.
+ if ( LA( 2 ) != LITERAL_by ) {
+ LT( 1 ).setType( IDENT );
+ if ( log.isDebugEnabled() ) {
+ log.debug( "weakKeywords() : new LT(1) token - " + LT( 1 ) );
+ }
+ }
+ break;
+ default:
+ // Case 2: The current token is after FROM and before '.'.
+ if (LA(0) == FROM && t != IDENT && LA(2) == DOT) {
+ HqlToken hqlToken = (HqlToken)LT(1);
+ if (hqlToken.isPossibleID()) {
+ hqlToken.setType(IDENT);
+ if ( log.isDebugEnabled() ) {
+ log.debug( "weakKeywords() : new LT(1) token - " + LT( 1 ) );
+ }
+ }
+ }
+ break;
+ }
+ }
+
+ public void handleDotIdent() throws TokenStreamException {
+ // This handles HHH-354, where there is a strange property name in a where clause.
+ // If the lookahead contains a DOT followed by something that isn't an IDENT...
+ if (LA(1) == DOT && LA(2) != IDENT) {
+ // See if the second lookahead token can be an identifier.
+ HqlToken t = (HqlToken)LT(2);
+ if (t.isPossibleID())
+ {
+ // Set it!
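+ // (Illustrative) e.g. in "where foo.order.id = 1", 'order' follows a DOT, so it
+ // can safely be re-typed from a keyword to an identifier here.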
+ LT( 2 ).setType( IDENT );
+ if ( log.isDebugEnabled() ) {
+ log.debug( "handleDotIdent() : new LT(2) token - " + LT( 2 ) );
+ }
+ }
+ }
+ }
+
+ public void processMemberOf(Token n, AST p, ASTPair currentAST) {
+ AST inAst = n == null ? astFactory.create( IN, "in" ) : astFactory.create( NOT_IN, "not in" );
+ astFactory.makeASTRoot( currentAST, inAst );
+ AST ast = createSubquery( p );
+ ast = ASTUtil.createParent( astFactory, IN_LIST, "inList", ast );
+ inAst.addChild( ast );
+ }
+
+ static public void panic() {
+ //overridden to avoid System.exit
+ throw new QueryException("Parser: panic");
+ }
+}
diff --git a/src/org/hibernate/hql/ast/HqlSqlWalker.java b/src/org/hibernate/hql/ast/HqlSqlWalker.java
new file mode 100644
index 0000000000..8684adb898
--- /dev/null
+++ b/src/org/hibernate/hql/ast/HqlSqlWalker.java
@@ -0,0 +1,1017 @@
+// $Id$
+package org.hibernate.hql.ast;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hibernate.QueryException;
+import org.hibernate.HibernateException;
+import org.hibernate.engine.JoinSequence;
+import org.hibernate.engine.ParameterBinder;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.hql.QueryTranslator;
+import org.hibernate.hql.antlr.HqlSqlBaseWalker;
+import org.hibernate.hql.antlr.HqlSqlTokenTypes;
+import org.hibernate.hql.antlr.HqlTokenTypes;
+import org.hibernate.hql.antlr.SqlTokenTypes;
+import org.hibernate.hql.ast.tree.AssignmentSpecification;
+import org.hibernate.hql.ast.tree.CollectionFunction;
+import org.hibernate.hql.ast.tree.ConstructorNode;
+import org.hibernate.hql.ast.tree.DeleteStatement;
+import org.hibernate.hql.ast.tree.DotNode;
+import org.hibernate.hql.ast.tree.FromClause;
+import org.hibernate.hql.ast.tree.FromElement;
+import org.hibernate.hql.ast.tree.FromReferenceNode;
+import org.hibernate.hql.ast.tree.IdentNode;
+import org.hibernate.hql.ast.tree.IndexNode;
+import org.hibernate.hql.ast.tree.InsertStatement;
+import org.hibernate.hql.ast.tree.IntoClause;
+import org.hibernate.hql.ast.tree.MethodNode;
+import org.hibernate.hql.ast.tree.ParameterNode;
+import org.hibernate.hql.ast.tree.QueryNode;
+import org.hibernate.hql.ast.tree.ResolvableNode;
+import org.hibernate.hql.ast.tree.RestrictableStatement;
+import org.hibernate.hql.ast.tree.SelectClause;
+import org.hibernate.hql.ast.tree.SelectExpression;
+import org.hibernate.hql.ast.tree.UpdateStatement;
+import org.hibernate.hql.ast.tree.Node;
+import org.hibernate.hql.ast.tree.OperatorNode;
+import org.hibernate.hql.ast.util.ASTPrinter;
+import org.hibernate.hql.ast.util.ASTUtil;
+import org.hibernate.hql.ast.util.AliasGenerator;
+import org.hibernate.hql.ast.util.JoinProcessor;
+import org.hibernate.hql.ast.util.LiteralProcessor;
+import org.hibernate.hql.ast.util.SessionFactoryHelper;
+import org.hibernate.hql.ast.util.SyntheticAndFactory;
+import org.hibernate.hql.ast.util.NodeTraverser;
+import org.hibernate.id.IdentifierGenerator;
+import org.hibernate.id.PostInsertIdentifierGenerator;
+import org.hibernate.id.SequenceGenerator;
+import org.hibernate.param.NamedParameterSpecification;
+import org.hibernate.param.ParameterSpecification;
+import org.hibernate.param.PositionalParameterSpecification;
+import
org.hibernate.param.VersionTypeSeedParameterSpecification; +import org.hibernate.param.CollectionFilterKeyParameterSpecification; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.JoinFragment; +import org.hibernate.type.AssociationType; +import org.hibernate.type.Type; +import org.hibernate.type.VersionType; +import org.hibernate.type.DbTimestampType; +import org.hibernate.usertype.UserVersionType; +import org.hibernate.util.ArrayHelper; + +import antlr.ASTFactory; +import antlr.RecognitionException; +import antlr.SemanticException; +import antlr.collections.AST; + +/** + * Implements methods used by the HQL->SQL tree transform grammar (a.k.a. the second phase). + *
+ * <ul>
+ * <li>Isolates the Hibernate API-specific code from the ANTLR generated code.</li>
+ * <li>Handles the SQL fragments generated by the persisters in order to create the SELECT and FROM clauses,
+ * taking into account the joins and projections that are implied by the mappings (persister/queryable).</li>
+ * <li>Uses SqlASTFactory to create customized AST nodes.</li>
+ * </ul>
    + * + * @see SqlASTFactory + */ +public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, ParameterBinder.NamedParameterSource { + private static final Log log = LogFactory.getLog( HqlSqlWalker.class ); + + private final QueryTranslatorImpl queryTranslatorImpl; + private final HqlParser hqlParser; + private final SessionFactoryHelper sessionFactoryHelper; + private final Map tokenReplacements; + private final AliasGenerator aliasGenerator = new AliasGenerator(); + private final LiteralProcessor literalProcessor; + private final ParseErrorHandler parseErrorHandler; + private final ASTPrinter printer; + private final String collectionFilterRole; + + private FromClause currentFromClause = null; + private SelectClause selectClause; + + private Set querySpaces = new HashSet(); + + private int parameterCount; + private Map namedParameters = new HashMap(); + private ArrayList parameters = new ArrayList(); + private int numberOfParametersInSetClause; + private int positionalParameterCount; + + private ArrayList assignmentSpecifications = new ArrayList(); + + private int impliedJoinType; + + + + /** + * Create a new tree transformer. + * + * @param qti Back pointer to the query translator implementation that is using this tree transform. + * @param sfi The session factory implementor where the Hibernate mappings can be found. + * @param parser A reference to the phase-1 parser + * @param tokenReplacements Registers the token replacement map with the walker. This map will + * be used to substitute function names and constants. + * @param collectionRole The collection role name of the collection used as the basis for the + * filter, NULL if this is not a collection filter compilation. + */ + public HqlSqlWalker( + QueryTranslatorImpl qti, + SessionFactoryImplementor sfi, + HqlParser parser, + Map tokenReplacements, + String collectionRole) { + setASTFactory( new SqlASTFactory( this ) ); + this.parseErrorHandler = new ErrorCounter(); + this.queryTranslatorImpl = qti; + this.sessionFactoryHelper = new SessionFactoryHelper( sfi ); + this.literalProcessor = new LiteralProcessor( this ); + this.tokenReplacements = tokenReplacements; + this.hqlParser = parser; + this.printer = new ASTPrinter( SqlTokenTypes.class ); + this.collectionFilterRole = collectionRole; + } + + + protected void prepareFromClauseInputTree(AST fromClauseInput) { + if ( !isSubQuery() ) { +// // inject param specifications to account for dynamic filter param values +// if ( ! getEnabledFilters().isEmpty() ) { +// Iterator filterItr = getEnabledFilters().values().iterator(); +// while ( filterItr.hasNext() ) { +// FilterImpl filter = ( FilterImpl ) filterItr.next(); +// if ( ! filter.getFilterDefinition().getParameterNames().isEmpty() ) { +// Iterator paramItr = filter.getFilterDefinition().getParameterNames().iterator(); +// while ( paramItr.hasNext() ) { +// String parameterName = ( String ) paramItr.next(); +// // currently param filters *only* work with single-column parameter types; +// // if that limitation is ever lifted, this logic will need to change to account for that +// ParameterNode collectionFilterKeyParameter = ( ParameterNode ) astFactory.create( PARAM, "?" 
);
+// DynamicFilterParameterSpecification paramSpec = new DynamicFilterParameterSpecification(
+// filter.getName(),
+// parameterName,
+// filter.getFilterDefinition().getParameterType( parameterName ),
+// positionalParameterCount++
+// );
+// collectionFilterKeyParameter.setHqlParameterSpecification( paramSpec );
+// parameters.add( paramSpec );
+// }
+// }
+// }
+// }
+
+ if ( isFilter() ) {
+ // Handle collection-filter compilation.
+ // IMPORTANT NOTE: This is modifying the INPUT (HQL) tree, not the output tree!
+ QueryableCollection persister = sessionFactoryHelper.getCollectionPersister( collectionFilterRole );
+ Type collectionElementType = persister.getElementType();
+ if ( !collectionElementType.isEntityType() ) {
+ throw new QueryException( "collection of values in filter: this" );
+ }
+
+ String collectionElementEntityName = persister.getElementPersister().getEntityName();
+ ASTFactory inputAstFactory = hqlParser.getASTFactory();
+ AST fromElement = ASTUtil.create( inputAstFactory, HqlTokenTypes.FILTER_ENTITY, collectionElementEntityName );
+ ASTUtil.createSibling( inputAstFactory, HqlTokenTypes.ALIAS, "this", fromElement );
+ fromClauseInput.addChild( fromElement );
+ // Show the modified AST.
+ if ( log.isDebugEnabled() ) {
+ log.debug( "prepareFromClauseInputTree() : Filter - Added 'this' as a from element..." );
+ }
+ queryTranslatorImpl.showHqlAst( hqlParser.getAST() );
+
+ // Create a parameter specification for the collection filter...
+ Type collectionFilterKeyType = sessionFactoryHelper.requireQueryableCollection( collectionFilterRole ).getKeyType();
+ ParameterNode collectionFilterKeyParameter = ( ParameterNode ) astFactory.create( PARAM, "?" );
+ CollectionFilterKeyParameterSpecification collectionFilterKeyParameterSpec = new CollectionFilterKeyParameterSpecification(
+ collectionFilterRole, collectionFilterKeyType, positionalParameterCount++
+ );
+ collectionFilterKeyParameter.setHqlParameterSpecification( collectionFilterKeyParameterSpec );
+ parameters.add( collectionFilterKeyParameterSpec );
+ }
+ }
+ }
+
+ public boolean isFilter() {
+ return collectionFilterRole != null;
+ }
+
+ public SessionFactoryHelper getSessionFactoryHelper() {
+ return sessionFactoryHelper;
+ }
+
+ public Map getTokenReplacements() {
+ return tokenReplacements;
+ }
+
+ public AliasGenerator getAliasGenerator() {
+ return aliasGenerator;
+ }
+
+ public FromClause getCurrentFromClause() {
+ return currentFromClause;
+ }
+
+ public ParseErrorHandler getParseErrorHandler() {
+ return parseErrorHandler;
+ }
+
+ public void reportError(RecognitionException e) {
+ parseErrorHandler.reportError( e ); // Use the delegate.
+ }
+
+ public void reportError(String s) {
+ parseErrorHandler.reportError( s ); // Use the delegate.
+ }
+
+ public void reportWarning(String s) {
+ parseErrorHandler.reportWarning( s );
+ }
+
+ /**
+ * Returns the set of unique query spaces (a.k.a.
+ * table names) that occurred in the query.
+ *
+ * @return A set of table names (Strings).
+ */ + public Set getQuerySpaces() { + return querySpaces; + } + + protected AST createFromElement(String path, AST alias, AST propertyFetch) throws SemanticException { + FromElement fromElement = currentFromClause.addFromElement( path, alias ); + fromElement.setAllPropertyFetch(propertyFetch!=null); + return fromElement; + } + + protected AST createFromFilterElement(AST filterEntity, AST alias) throws SemanticException { + FromElement fromElement = currentFromClause.addFromElement( filterEntity.getText(), alias ); + FromClause fromClause = fromElement.getFromClause(); + QueryableCollection persister = sessionFactoryHelper.getCollectionPersister( collectionFilterRole ); + // Get the names of the columns used to link between the collection + // owner and the collection elements. + String[] keyColumnNames = persister.getKeyColumnNames(); + String fkTableAlias = persister.isOneToMany() + ? fromElement.getTableAlias() + : fromClause.getAliasGenerator().createName( collectionFilterRole ); + JoinSequence join = sessionFactoryHelper.createJoinSequence(); + join.setRoot( persister, fkTableAlias ); + if ( !persister.isOneToMany() ) { + join.addJoin( ( AssociationType ) persister.getElementType(), + fromElement.getTableAlias(), + JoinFragment.INNER_JOIN, + persister.getElementColumnNames( fkTableAlias ) ); + } + join.addCondition( fkTableAlias, keyColumnNames, " = ?" ); + fromElement.setJoinSequence( join ); + fromElement.setFilter( true ); + if ( log.isDebugEnabled() ) { + log.debug( "createFromFilterElement() : processed filter FROM element." ); + } + return fromElement; + } + + protected void createFromJoinElement( + AST path, + AST alias, + int joinType, + AST fetchNode, + AST propertyFetch, + AST with) throws SemanticException { + boolean fetch = fetchNode != null; + if ( fetch && isSubQuery() ) { + throw new QueryException( "fetch not allowed in subquery from-elements" ); + } + // The path AST should be a DotNode, and it should have been evaluated already. + if ( path.getType() != SqlTokenTypes.DOT ) { + throw new SemanticException( "Path expected for join!" ); + } + DotNode dot = ( DotNode ) path; + int hibernateJoinType = JoinProcessor.toHibernateJoinType( joinType ); + dot.setJoinType( hibernateJoinType ); // Tell the dot node about the join type. + dot.setFetch( fetch ); + // Generate an explicit join for the root dot node. The implied joins will be collected and passed up + // to the root dot node. + dot.resolve( true, false, alias == null ? 
null : alias.getText() );
+ FromElement fromElement = dot.getImpliedJoin();
+ fromElement.setAllPropertyFetch(propertyFetch!=null);
+
+ if ( with != null ) {
+ if ( fetch ) {
+ throw new SemanticException( "with-clause not allowed on fetched associations; use filters" );
+ }
+ handleWithFragment( fromElement, with );
+ }
+
+ if ( log.isDebugEnabled() ) {
+ log.debug( "createFromJoinElement() : " + getASTPrinter().showAsString( fromElement, "-- join tree --" ) );
+ }
+ }
+
+ private void handleWithFragment(FromElement fromElement, AST hqlWithNode) throws SemanticException
+ {
+ try {
+ withClause( hqlWithNode );
+ AST hqlSqlWithNode = returnAST;
+ if ( log.isDebugEnabled() ) {
+ log.debug( "handleWithFragment() : " + getASTPrinter().showAsString( hqlSqlWithNode, "-- with clause --" ) );
+ }
+ WithClauseVisitor visitor = new WithClauseVisitor();
+ NodeTraverser traverser = new NodeTraverser( visitor );
+ traverser.traverseDepthFirst( hqlSqlWithNode );
+ FromElement referencedFromElement = visitor.getReferencedFromElement();
+ if ( referencedFromElement != fromElement ) {
+ throw new InvalidWithClauseException( "with-clause expressions did not reference from-clause element to which the with-clause was associated" );
+ }
+ SqlGenerator sql = new SqlGenerator( getSessionFactoryHelper().getFactory() );
+ sql.whereExpr( hqlSqlWithNode.getFirstChild() );
+ fromElement.setWithClauseFragment( visitor.getJoinAlias(), "(" + sql.getSQL() + ")" );
+
+ }
+ catch( SemanticException e ) {
+ throw e;
+ }
+ catch( InvalidWithClauseException e ) {
+ throw e;
+ }
+ catch ( Exception e) {
+ throw new SemanticException( e.getMessage() );
+ }
+ }
+
+ private static class WithClauseVisitor implements NodeTraverser.VisitationStrategy {
+ private FromElement referencedFromElement;
+ private String joinAlias;
+
+ public void visit(AST node) {
+ // todo : currently expects that the individual with expressions apply to the same sql table join.
+ // This may not be the case for joined-subclass where the property values
+ // might be coming from different tables in the joined hierarchy. At some
+ // point we should expand this to support that capability. However, that has
+ // some difficulties:
+ // 1) the biggest is how to handle ORs when the individual comparisons are
+ // linked to different sql joins.
+ // 2) here we would need to track each comparison individually, along with
+ // the join alias to which it applies and then pass that information
+ // back to the FromElement so it can pass it along to the JoinSequence
+
+ if ( node instanceof DotNode ) {
+ DotNode dotNode = ( DotNode ) node;
+ FromElement fromElement = dotNode.getFromElement();
+ if ( referencedFromElement != null ) {
+ if ( fromElement != referencedFromElement ) {
+ throw new HibernateException( "with-clause referenced two different from-clause elements" );
+ }
+ }
+ else {
+ referencedFromElement = fromElement;
+ joinAlias = extractAppliedAlias( dotNode );
+ // todo : temporary
+ // needed because currently persister is the one that
+ // creates and renders the join fragments for inheritance
+ // hierarchies...
+ if ( !joinAlias.equals( referencedFromElement.getTableAlias() ) ) {
+ throw new InvalidWithClauseException( "with clause can only reference columns in the driving table" );
+ }
+ }
+ }
+ }
+
+ private String extractAppliedAlias(DotNode dotNode) {
+ return dotNode.getText().substring( 0, dotNode.getText().indexOf( '.'
) ); + } + + public FromElement getReferencedFromElement() { + return referencedFromElement; + } + + public String getJoinAlias() { + return joinAlias; + } + } + + /** + * Sets the current 'FROM' context. + * + * @param fromNode The new 'FROM' context. + * @param inputFromNode The from node from the input AST. + */ + protected void pushFromClause(AST fromNode, AST inputFromNode) { + FromClause newFromClause = ( FromClause ) fromNode; + newFromClause.setParentFromClause( currentFromClause ); + currentFromClause = newFromClause; + } + + /** + * Returns to the previous 'FROM' context. + */ + private void popFromClause() { + currentFromClause = currentFromClause.getParentFromClause(); + } + + protected void lookupAlias(AST aliasRef) + throws SemanticException { + FromElement alias = currentFromClause.getFromElement( aliasRef.getText() ); + FromReferenceNode aliasRefNode = ( FromReferenceNode ) aliasRef; + aliasRefNode.setFromElement( alias ); + } + + protected void setImpliedJoinType(int joinType) { + impliedJoinType = JoinProcessor.toHibernateJoinType( joinType ); + } + + public int getImpliedJoinType() { + return impliedJoinType; + } + + protected AST lookupProperty(AST dot, boolean root, boolean inSelect) throws SemanticException { + DotNode dotNode = ( DotNode ) dot; + FromReferenceNode lhs = dotNode.getLhs(); + AST rhs = lhs.getNextSibling(); + switch ( rhs.getType() ) { + case SqlTokenTypes.ELEMENTS: + case SqlTokenTypes.INDICES: + if ( log.isDebugEnabled() ) { + log.debug( "lookupProperty() " + dotNode.getPath() + " => " + rhs.getText() + "(" + lhs.getPath() + ")" ); + } + CollectionFunction f = ( CollectionFunction ) rhs; + // Re-arrange the tree so that the collection function is the root and the lhs is the path. + f.setFirstChild( lhs ); + lhs.setNextSibling( null ); + dotNode.setFirstChild( f ); + resolve( lhs ); // Don't forget to resolve the argument! + f.resolve( inSelect ); // Resolve the collection function now. + return f; + default: + // Resolve everything up to this dot, but don't resolve the placeholders yet. + dotNode.resolveFirstChild(); + return dotNode; + } + } + + protected boolean isNonQualifiedPropertyRef(AST ident) { + final String identText = ident.getText(); + if ( currentFromClause.isFromElementAlias( identText ) ) { + return false; + } + + List fromElements = currentFromClause.getExplicitFromElements(); + if ( fromElements.size() == 1 ) { + final FromElement fromElement = ( FromElement ) fromElements.get( 0 ); + try { + log.trace( "attempting to resolve property [" + identText + "] as a non-qualified ref" ); + return fromElement.getPropertyMapping( identText ).toType( identText ) != null; + } + catch( QueryException e ) { + // Should mean that no such property was found + } + } + + return false; + } + + protected AST lookupNonQualifiedProperty(AST property) throws SemanticException { + final FromElement fromElement = ( FromElement ) currentFromClause.getExplicitFromElements().get( 0 ); + AST syntheticDotNode = generateSyntheticDotNodeForNonQualifiedPropertyRef( property, fromElement ); + return lookupProperty( syntheticDotNode, false, getCurrentClauseType() == HqlSqlTokenTypes.SELECT ); + } + + private AST generateSyntheticDotNodeForNonQualifiedPropertyRef(AST property, FromElement fromElement) { + AST dot = getASTFactory().create( DOT, "{non-qualified-property-ref}" ); + // TODO : better way?!? 
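+ // (added note, not in the original source: the synthetic tree built below has the shape + // DOT( {synthetic-alias}, property ), the same shape the parser produces for an explicit + // "alias.property" reference, so lookupProperty() can consume it unchanged)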
+ ( ( DotNode ) dot ).setPropertyPath( ( ( FromReferenceNode ) property ).getPath() ); + + IdentNode syntheticAlias = ( IdentNode ) getASTFactory().create( IDENT, "{synthetic-alias}" ); + syntheticAlias.setFromElement( fromElement ); + syntheticAlias.setResolved(); + + dot.setFirstChild( syntheticAlias ); + dot.addChild( property ); + + return dot; + } + + protected void processQuery(AST select, AST query) throws SemanticException { + if ( log.isDebugEnabled() ) { + log.debug( "processQuery() : " + query.toStringTree() ); + } + + try { + QueryNode qn = ( QueryNode ) query; + + // Was there an explicit select expression? + boolean explicitSelect = select != null && select.getNumberOfChildren() > 0; + + if ( !explicitSelect ) { + // No explicit select expression; render the id and properties + // projection lists for every persister in the from clause into + // a single 'token node'. + //TODO: the only reason we need this stuff now is collection filters, + // we should get rid of derived select clause completely! + createSelectClauseFromFromClause( qn ); + } + else { + // Use the explicitly declared select expression; determine the + // return types indicated by each select token + useSelectClause( select ); + } + + // After that, process the JOINs. + // Invoke a delegate to do the work, as this is fairly complex. + JoinProcessor joinProcessor = new JoinProcessor( astFactory, queryTranslatorImpl ); + joinProcessor.processJoins( qn, isSubQuery() ); + + // Attach any mapping-defined "ORDER BY" fragments + Iterator itr = qn.getFromClause().getProjectionList().iterator(); + while ( itr.hasNext() ) { + final FromElement fromElement = ( FromElement ) itr.next(); +// if ( fromElement.isFetch() && fromElement.isCollectionJoin() ) { + if ( fromElement.isFetch() && fromElement.getQueryableCollection() != null ) { + // Does the collection referenced by this FromElement + // specify an order-by attribute? 
If so, attach it to + // the query's order-by + if ( fromElement.getQueryableCollection().hasOrdering() ) { + String orderByFragment = fromElement + .getQueryableCollection() + .getSQLOrderByString( fromElement.getCollectionTableAlias() ); + qn.getOrderByClause().addOrderFragment( orderByFragment ); + } + if ( fromElement.getQueryableCollection().hasManyToManyOrdering() ) { + String orderByFragment = fromElement.getQueryableCollection() + .getManyToManyOrderByString( fromElement.getTableAlias() ); + qn.getOrderByClause().addOrderFragment( orderByFragment ); + } + } + } + } + finally { + popFromClause(); + } + } + + protected void postProcessDML(RestrictableStatement statement) throws SemanticException { + statement.getFromClause().resolve(); + + FromElement fromElement = ( FromElement ) statement.getFromClause().getFromElements().get( 0 ); + Queryable persister = fromElement.getQueryable(); + // Make #@%$^#^&# sure no alias is applied to the table name + fromElement.setText( persister.getTableName() ); + + // append any filter fragments; the EMPTY_MAP is used under the assumption that + // currently enabled filters should not affect this process + if ( persister.getDiscriminatorType() != null ) { + new SyntheticAndFactory( getASTFactory() ).addDiscriminatorWhereFragment( + statement, + persister, + java.util.Collections.EMPTY_MAP, + fromElement.getTableAlias() + ); + } + + } + + protected void postProcessUpdate(AST update) throws SemanticException { + UpdateStatement updateStatement = ( UpdateStatement ) update; + + postProcessDML( updateStatement ); + } + + protected void postProcessDelete(AST delete) throws SemanticException { + postProcessDML( ( DeleteStatement ) delete ); + } + + public static boolean supportsIdGenWithBulkInsertion(IdentifierGenerator generator) { + return SequenceGenerator.class.isAssignableFrom( generator.getClass() ) + || PostInsertIdentifierGenerator.class.isAssignableFrom( generator.getClass() ); + } + + protected void postProcessInsert(AST insert) throws SemanticException, QueryException { + InsertStatement insertStatement = ( InsertStatement ) insert; + insertStatement.validate(); + + SelectClause selectClause = insertStatement.getSelectClause(); + Queryable persister = insertStatement.getIntoClause().getQueryable(); + + if ( !insertStatement.getIntoClause().isExplicitIdInsertion() ) { + // We need to generate ids as part of this bulk insert. + // + // Note that this is only supported for sequence-style generators and + // post-insert-style generators; basically, only in-db generators + IdentifierGenerator generator = persister.getIdentifierGenerator(); + if ( !supportsIdGenWithBulkInsertion( generator ) ) { + throw new QueryException( "can only generate ids as part of bulk insert with either sequence or post-insert style generators" ); + } + + AST idSelectExprNode = null; + + if ( SequenceGenerator.class.isAssignableFrom( generator.getClass() ) ) { + String seqName = ( String ) ( ( SequenceGenerator ) generator ).generatorKey(); + String nextval = sessionFactoryHelper.getFactory().getDialect().getSelectSequenceNextValString( seqName ); + idSelectExprNode = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, nextval ); + } + else { + //Don't need this, because we should never ever be selecting no columns in an insert ... select... 
+ //and because it causes a bug on DB2 + /*String idInsertString = sessionFactoryHelper.getFactory().getDialect().getIdentityInsertString(); + if ( idInsertString != null ) { + idSelectExprNode = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, idInsertString ); + }*/ + } + + if ( idSelectExprNode != null ) { + AST currentFirstSelectExprNode = selectClause.getFirstChild(); + selectClause.setFirstChild( idSelectExprNode ); + idSelectExprNode.setNextSibling( currentFirstSelectExprNode ); + + insertStatement.getIntoClause().prependIdColumnSpec(); + } + } + + final boolean includeVersionProperty = persister.isVersioned() && + !insertStatement.getIntoClause().isExplicitVersionInsertion() && + persister.isVersionPropertyInsertable(); + if ( includeVersionProperty ) { + // We need to seed the version value as part of this bulk insert + VersionType versionType = persister.getVersionType(); + AST versionValueNode = null; + + if ( sessionFactoryHelper.getFactory().getDialect().supportsParametersInInsertSelect() ) { + versionValueNode = getASTFactory().create( HqlSqlTokenTypes.PARAM, "?" ); + ParameterSpecification paramSpec = new VersionTypeSeedParameterSpecification( versionType ); + ( ( ParameterNode ) versionValueNode ).setHqlParameterSpecification( paramSpec ); + parameters.add( 0, paramSpec ); + } + else { + if ( isIntegral( versionType ) ) { + try { + Object seedValue = versionType.seed( null ); + versionValueNode = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, seedValue.toString() ); + } + catch( Throwable t ) { + throw new QueryException( "could not determine seed value for version on bulk insert [" + versionType + "]" ); + } + } + else if ( isDatabaseGeneratedTimestamp( versionType ) ) { + String functionName = sessionFactoryHelper.getFactory().getDialect().getCurrentTimestampSQLFunctionName(); + versionValueNode = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, functionName ); + } + else { + throw new QueryException( "cannot handle version type [" + versionType + "] on bulk inserts with dialects not supporting parameters in insert-select statements" ); + } + } + + AST currentFirstSelectExprNode = selectClause.getFirstChild(); + selectClause.setFirstChild( versionValueNode ); + versionValueNode.setNextSibling( currentFirstSelectExprNode ); + + insertStatement.getIntoClause().prependVersionColumnSpec(); + } + + if ( insertStatement.getIntoClause().isDiscriminated() ) { + String sqlValue = insertStatement.getIntoClause().getQueryable().getDiscriminatorSQLValue(); + AST discrimValue = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, sqlValue ); + insertStatement.getSelectClause().addChild( discrimValue ); + } + + } + + private boolean isDatabaseGeneratedTimestamp(Type type) { + // currently only the Hibernate-supplied DbTimestampType is supported here + return DbTimestampType.class.isAssignableFrom( type.getClass() ); + } + + private boolean isIntegral(Type type) { + return Long.class.isAssignableFrom( type.getReturnedClass() ) + || Integer.class.isAssignableFrom( type.getReturnedClass() ) + || long.class.isAssignableFrom( type.getReturnedClass() ) + || int.class.isAssignableFrom( type.getReturnedClass() ); + } + + private void useSelectClause(AST select) throws SemanticException { + selectClause = ( SelectClause ) select; + selectClause.initializeExplicitSelectClause( currentFromClause ); + } + + private void createSelectClauseFromFromClause(QueryNode qn) throws SemanticException { + AST select = astFactory.create( SELECT_CLAUSE, "{derived select clause}" ); + AST sibling = 
qn.getFromClause(); + qn.setFirstChild( select ); + select.setNextSibling( sibling ); + selectClause = ( SelectClause ) select; + selectClause.initializeDerivedSelectClause( currentFromClause ); + if ( log.isDebugEnabled() ) { + log.debug( "Derived SELECT clause created." ); + } + } + + protected void resolve(AST node) throws SemanticException { + if ( node != null ) { + // This is called when it's time to fully resolve a path expression. + ResolvableNode r = ( ResolvableNode ) node; + if ( isInFunctionCall() ) { + r.resolveInFunctionCall( false, true ); + } + else { + r.resolve( false, true ); // Generate implicit joins, only if necessary. + } + } + } + + protected void resolveSelectExpression(AST node) throws SemanticException { + // This is called when it's time to fully resolve a path expression. + int type = node.getType(); + switch ( type ) { + case DOT: + DotNode dot = ( DotNode ) node; + dot.resolveSelectExpression(); + break; + case ALIAS_REF: + // Notify the FROM element that it is being referenced by the select. + FromReferenceNode aliasRefNode = ( FromReferenceNode ) node; + //aliasRefNode.resolve( false, false, aliasRefNode.getText() ); //TODO: is it kosher to do it here? + aliasRefNode.resolve( false, false ); //TODO: is it kosher to do it here? + FromElement fromElement = aliasRefNode.getFromElement(); + if ( fromElement != null ) { + fromElement.setIncludeSubclasses( true ); + } + default: + break; + } + } + + protected void beforeSelectClause() throws SemanticException { + // Turn off includeSubclasses on all FromElements. + FromClause from = getCurrentFromClause(); + List fromElements = from.getFromElements(); + for ( Iterator iterator = fromElements.iterator(); iterator.hasNext(); ) { + FromElement fromElement = ( FromElement ) iterator.next(); + fromElement.setIncludeSubclasses( false ); + } + } + + protected AST generatePositionalParameter(AST inputNode) throws SemanticException { + if ( namedParameters.size() > 0 ) { + throw new SemanticException( "cannot define positional parameter after any named parameters have been defined" ); + } + ParameterNode parameter = ( ParameterNode ) astFactory.create( PARAM, "?" ); + PositionalParameterSpecification paramSpec = new PositionalParameterSpecification( + ( ( Node ) inputNode ).getLine(), + ( ( Node ) inputNode ).getColumn(), + positionalParameterCount++ + ); + parameter.setHqlParameterSpecification( paramSpec ); + parameters.add( paramSpec ); + return parameter; + } + + protected AST generateNamedParameter(AST delimiterNode, AST nameNode) throws SemanticException { + String name = nameNode.getText(); + trackNamedParameterPositions( name ); + + // create the node initially with the param name so that it shows + // appropriately in the "original text" attribute + ParameterNode parameter = ( ParameterNode ) astFactory.create( NAMED_PARAM, name ); + parameter.setText( "?" 
); + + NamedParameterSpecification paramSpec = new NamedParameterSpecification( + ( ( Node ) delimiterNode ).getLine(), + ( ( Node ) delimiterNode ).getColumn(), + name + ); + parameter.setHqlParameterSpecification( paramSpec ); + parameters.add( paramSpec ); + return parameter; + } + + private void trackNamedParameterPositions(String name) { + Integer loc = new Integer( parameterCount++ ); + Object o = namedParameters.get( name ); + if ( o == null ) { + namedParameters.put( name, loc ); + } + else if ( o instanceof Integer ) { + ArrayList list = new ArrayList( 4 ); + list.add( o ); + list.add( loc ); + namedParameters.put( name, list ); + } + else { + ( ( ArrayList ) o ).add( loc ); + } + } + + protected void processConstant(AST constant) throws SemanticException { + literalProcessor.processConstant( constant, true ); // Use the delegate, resolve identifiers as FROM element aliases. + } + + protected void processBoolean(AST constant) throws SemanticException { + literalProcessor.processBoolean( constant ); // Use the delegate. + } + + protected void processNumericLiteral(AST literal) { + literalProcessor.processNumeric( literal ); + } + + protected void processIndex(AST indexOp) throws SemanticException { + IndexNode indexNode = ( IndexNode ) indexOp; + indexNode.resolve( true, true ); + } + + protected void processFunction(AST functionCall, boolean inSelect) throws SemanticException { + MethodNode methodNode = ( MethodNode ) functionCall; + methodNode.resolve( inSelect ); + } + + protected void processConstructor(AST constructor) throws SemanticException { + ConstructorNode constructorNode = ( ConstructorNode ) constructor; + constructorNode.prepare(); + } + + protected void setAlias(AST selectExpr, AST ident) { + ((SelectExpression) selectExpr).setAlias(ident.getText()); + } + + /** + * Returns the locations of all occurrences of the named parameter. 
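+ * (illustrative example, not in the original javadoc: in "from Animal a where a.name = :n or + * a.mother.name = :n" the named parameter "n" occupies two positions, so both locations are + * returned)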
+ */ + public int[] getNamedParameterLocations(String name) throws QueryException { + Object o = namedParameters.get( name ); + if ( o == null ) { + QueryException qe = new QueryException( QueryTranslator.ERROR_NAMED_PARAMETER_DOES_NOT_APPEAR + name ); + qe.setQueryString( queryTranslatorImpl.getQueryString() ); + throw qe; + } + if ( o instanceof Integer ) { + return new int[]{( ( Integer ) o ).intValue()}; + } + else { + return ArrayHelper.toIntArray( ( ArrayList ) o ); + } + } + + public void addQuerySpaces(Serializable[] spaces) { + for ( int i = 0; i < spaces.length; i++ ) { + querySpaces.add( spaces[i] ); + } + } + + public Type[] getReturnTypes() { + return selectClause.getQueryReturnTypes(); + } + + public String[] getReturnAliases() { + return selectClause.getQueryReturnAliases(); + } + + public SelectClause getSelectClause() { + return selectClause; + } + + public FromClause getFinalFromClause() { + FromClause top = currentFromClause; + while ( top.getParentFromClause() != null ) { + top = top.getParentFromClause(); + } + return top; + } + + public boolean isShallowQuery() { + // select clauses for insert statements should always be treated as shallow + return getStatementType() == INSERT || queryTranslatorImpl.isShallowQuery(); + } + + public Map getEnabledFilters() { + return queryTranslatorImpl.getEnabledFilters(); + } + + public LiteralProcessor getLiteralProcessor() { + return literalProcessor; + } + + public ASTPrinter getASTPrinter() { + return printer; + } + + public ArrayList getParameters() { + return parameters; + } + + public int getNumberOfParametersInSetClause() { + return numberOfParametersInSetClause; + } + + protected void evaluateAssignment(AST eq) throws SemanticException { + prepareLogicOperator( eq ); + Queryable persister = getCurrentFromClause().getFromElement().getQueryable(); + evaluateAssignment( eq, persister, -1 ); + } + + private void evaluateAssignment(AST eq, Queryable persister, int targetIndex) { + if ( persister.isMultiTable() ) { + // we only need to collect this information if the persister is considered multi-table + AssignmentSpecification specification = new AssignmentSpecification( eq, persister ); + if ( targetIndex >= 0 ) { + assignmentSpecifications.add( targetIndex, specification ); + } + else { + assignmentSpecifications.add( specification ); + } + numberOfParametersInSetClause += specification.getParameters().length; + } + } + + public ArrayList getAssignmentSpecifications() { + return assignmentSpecifications; + } + + protected AST createIntoClause(String path, AST propertySpec) throws SemanticException { + Queryable persister = ( Queryable ) getSessionFactoryHelper().requireClassPersister( path ); + + IntoClause intoClause = ( IntoClause ) getASTFactory().create( INTO, persister.getEntityName() ); + intoClause.setFirstChild( propertySpec ); + intoClause.initialize( persister ); + + addQuerySpaces( persister.getQuerySpaces() ); + + return intoClause; + } + + protected void prepareVersioned(AST updateNode, AST versioned) throws SemanticException { + UpdateStatement updateStatement = ( UpdateStatement ) updateNode; + FromClause fromClause = updateStatement.getFromClause(); + if ( versioned != null ) { + // Make sure that the persister is versioned + Queryable persister = fromClause.getFromElement().getQueryable(); + if ( !persister.isVersioned() ) { + throw new SemanticException( "increment option specified for update of non-versioned entity" ); + } + + VersionType versionType = persister.getVersionType(); + if ( versionType instanceof 
UserVersionType ) { + throw new SemanticException( "user-defined version types not supported for increment option" ); + } + + AST eq = getASTFactory().create( HqlSqlTokenTypes.EQ, "=" ); + AST versionPropertyNode = generateVersionPropertyNode( persister ); + + eq.setFirstChild( versionPropertyNode ); + + AST versionIncrementNode = null; + if ( Date.class.isAssignableFrom( versionType.getReturnedClass() ) ) { + versionIncrementNode = getASTFactory().create( HqlSqlTokenTypes.PARAM, "?" ); + ParameterSpecification paramSpec = new VersionTypeSeedParameterSpecification( versionType ); + ( ( ParameterNode ) versionIncrementNode ).setHqlParameterSpecification( paramSpec ); + parameters.add( 0, paramSpec ); + } + else { + // Not possible to simply re-use the versionPropertyNode here as it causes + // OOM errors due to circularity :( + versionIncrementNode = getASTFactory().create( HqlSqlTokenTypes.PLUS, "+" ); + versionIncrementNode.setFirstChild( generateVersionPropertyNode( persister ) ); + versionIncrementNode.addChild( getASTFactory().create( HqlSqlTokenTypes.IDENT, "1" ) ); + } + + eq.addChild( versionIncrementNode ); + + evaluateAssignment( eq, persister, 0 ); + + AST setClause = updateStatement.getSetClause(); + AST currentFirstSetElement = setClause.getFirstChild(); + setClause.setFirstChild( eq ); + eq.setNextSibling( currentFirstSetElement ); + } + } + + private AST generateVersionPropertyNode(Queryable persister) throws SemanticException { + String versionPropertyName = persister.getPropertyNames()[ persister.getVersionProperty() ]; + AST versionPropertyRef = getASTFactory().create( HqlSqlTokenTypes.IDENT, versionPropertyName ); + AST versionPropertyNode = lookupNonQualifiedProperty( versionPropertyRef ); + resolve( versionPropertyNode ); + return versionPropertyNode; + } + + protected void prepareLogicOperator(AST operator) throws SemanticException { + ( ( OperatorNode ) operator ).initialize(); + } + + protected void prepareArithmeticOperator(AST operator) throws SemanticException { + ( ( OperatorNode ) operator ).initialize(); + } + + public static void panic() { + throw new QueryException( "TreeWalker: panic" ); + } +} diff --git a/src/org/hibernate/hql/ast/HqlToken.java b/src/org/hibernate/hql/ast/HqlToken.java new file mode 100644 index 0000000000..6a212f67ad --- /dev/null +++ b/src/org/hibernate/hql/ast/HqlToken.java @@ -0,0 +1,71 @@ +// $Id$ +package org.hibernate.hql.ast; + +/** + * A custom token class for the HQL grammar. + *
NOTE: This class must be public because it is instantiated by the ANTLR library. Ignore any suggestions + * by various code 'analyzers' about this class being package local.
+ */ +public class HqlToken extends antlr.CommonToken { + /** + * True if this token could be an identifier. + */ + private boolean possibleID = false; + /** + * The previous token type. + */ + private int tokenType; + + /** + * Returns true if the token could be an identifier. + * + * @return True if the token could be interpreted as an identifier, + * false if not. + */ + public boolean isPossibleID() { + return possibleID; + } + + /** + * Sets the type of the token, remembering the previous type. + * + * @param t The new token type. + */ + public void setType(int t) { + this.tokenType = getType(); + super.setType( t ); + } + + /** + * Returns the previous token type. + * + * @return int - The old token type. + */ + private int getPreviousType() { + return tokenType; + } + + /** + * Set to true if this token can be interpreted as an identifier, + * false if not. + * + * @param possibleID True if this is a keyword/identifier, false if not. + */ + public void setPossibleID(boolean possibleID) { + this.possibleID = possibleID; + } + + /** + * Returns a string representation of the object. + * + * @return String - The debug string. + */ + public String toString() { + return "[\"" + + getText() + + "\",<" + getType() + "> previously: <" + getPreviousType() + ">,line=" + + line + ",col=" + + col + ",possibleID=" + possibleID + "]"; + } + +} diff --git a/src/org/hibernate/hql/ast/InvalidPathException.java b/src/org/hibernate/hql/ast/InvalidPathException.java new file mode 100644 index 0000000000..474b06c9d1 --- /dev/null +++ b/src/org/hibernate/hql/ast/InvalidPathException.java @@ -0,0 +1,15 @@ +// $Id$ +package org.hibernate.hql.ast; + +import antlr.SemanticException; + +/** + * Exception thrown when an invalid path is found in a query. + * + * @author josh Dec 5, 2004 7:05:34 PM + */ +public class InvalidPathException extends SemanticException { + public InvalidPathException(String s) { + super( s ); + } +} diff --git a/src/org/hibernate/hql/ast/InvalidWithClauseException.java b/src/org/hibernate/hql/ast/InvalidWithClauseException.java new file mode 100644 index 0000000000..61a9f28d47 --- /dev/null +++ b/src/org/hibernate/hql/ast/InvalidWithClauseException.java @@ -0,0 +1,18 @@ +package org.hibernate.hql.ast; + +import org.hibernate.QueryException; + +/** + * {@inheritDoc} + * + * @author Steve Ebersole + */ +public class InvalidWithClauseException extends QuerySyntaxException { + public InvalidWithClauseException(String message) { + super( message ); + } + + public InvalidWithClauseException(String message, String queryString) { + super( message, queryString ); + } +} diff --git a/src/org/hibernate/hql/ast/ParameterTranslationsImpl.java b/src/org/hibernate/hql/ast/ParameterTranslationsImpl.java new file mode 100644 index 0000000000..fc5bc7b221 --- /dev/null +++ b/src/org/hibernate/hql/ast/ParameterTranslationsImpl.java @@ -0,0 +1,149 @@ +package org.hibernate.hql.ast; + +import org.hibernate.hql.ParameterTranslations; +import org.hibernate.type.Type; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.param.PositionalParameterSpecification; +import org.hibernate.param.NamedParameterSpecification; +import org.hibernate.util.ArrayHelper; + +import java.util.Map; +import java.util.Set; +import java.util.List; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.io.Serializable; + +/** + * Defines the information available for parameters encountered during + * query translation through the antlr-based parser. 
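+ * (summary added for clarity, not in the original: ordinal parameters are exposed 1-based to + * callers but stored 0-based internally, and a named parameter maps to multiple sql locations + * when the same name appears more than once in the query)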
+ * + * @author Steve Ebersole + */ +public class ParameterTranslationsImpl implements ParameterTranslations { + + private final Map namedParameters; + private final ParameterInfo[] ordinalParameters; + + public boolean supportsOrdinalParameterMetadata() { + return true; + } + + public int getOrdinalParameterCount() { + return ordinalParameters.length; + } + + public ParameterInfo getOrdinalParameterInfo(int ordinalPosition) { + // remember that ordinal parameter numbers are 1-based!!! + return ordinalParameters[ordinalPosition - 1]; + } + + public int getOrdinalParameterSqlLocation(int ordinalPosition) { + return getOrdinalParameterInfo( ordinalPosition ).getSqlLocations()[0]; + } + + public Type getOrdinalParameterExpectedType(int ordinalPosition) { + return getOrdinalParameterInfo( ordinalPosition ).getExpectedType(); + } + + public Set getNamedParameterNames() { + return namedParameters.keySet(); + } + + public ParameterInfo getNamedParameterInfo(String name) { + return ( ParameterInfo ) namedParameters.get( name ); + } + + public int[] getNamedParameterSqlLocations(String name) { + return getNamedParameterInfo( name ).getSqlLocations(); + } + + public Type getNamedParameterExpectedType(String name) { + return getNamedParameterInfo( name ).getExpectedType(); + } + + /** + * Constructs a parameter metadata object given a list of parameter + * specifications. + *
+ * Note: the order in the incoming list denotes the parameter's + * pseudo-position within the resulting sql statement. + * + * @param parameterSpecifications The ordered list of parameter specifications. + */ + public ParameterTranslationsImpl(List parameterSpecifications) { + + class NamedParamTempHolder { + String name; + Type type; + List positions = new ArrayList(); + } + + int size = parameterSpecifications.size(); + List ordinalParameterList = new ArrayList(); + Map namedParameterMap = new HashMap(); + for ( int i = 0; i < size; i++ ) { + final ParameterSpecification spec = ( ParameterSpecification ) parameterSpecifications.get( i ); + if ( PositionalParameterSpecification.class.isAssignableFrom( spec.getClass() ) ) { + PositionalParameterSpecification ordinalSpec = ( PositionalParameterSpecification ) spec; + ordinalParameterList.add( new ParameterInfo( i, ordinalSpec.getExpectedType() ) ); + } + else if ( NamedParameterSpecification.class.isAssignableFrom( spec.getClass() ) ) { + NamedParameterSpecification namedSpec = ( NamedParameterSpecification ) spec; + NamedParamTempHolder paramHolder = ( NamedParamTempHolder ) namedParameterMap.get( namedSpec.getName() ); + if ( paramHolder == null ) { + paramHolder = new NamedParamTempHolder(); + paramHolder.name = namedSpec.getName(); + paramHolder.type = namedSpec.getExpectedType(); + namedParameterMap.put( namedSpec.getName(), paramHolder ); + } + paramHolder.positions.add( new Integer( i ) ); + } + else { + // don't care about other param types here, just those explicitly user-defined... + } + } + + ordinalParameters = ( ParameterInfo[] ) ordinalParameterList.toArray( new ParameterInfo[ordinalParameterList.size()] ); + + if ( namedParameterMap.isEmpty() ) { + namedParameters = java.util.Collections.EMPTY_MAP; + } + else { + Map namedParametersBacking = new HashMap( namedParameterMap.size() ); + Iterator itr = namedParameterMap.values().iterator(); + while( itr.hasNext() ) { + final NamedParamTempHolder holder = ( NamedParamTempHolder ) itr.next(); + namedParametersBacking.put( + holder.name, + new ParameterInfo( ArrayHelper.toIntArray( holder.positions ), holder.type ) + ); + } + namedParameters = java.util.Collections.unmodifiableMap( namedParametersBacking ); + } + } + + public static class ParameterInfo implements Serializable { + private final int[] sqlLocations; + private final Type expectedType; + + public ParameterInfo(int[] sqlPositions, Type expectedType) { + this.sqlLocations = sqlPositions; + this.expectedType = expectedType; + } + + public ParameterInfo(int sqlPosition, Type expectedType) { + this.sqlLocations = new int[] { sqlPosition }; + this.expectedType = expectedType; + } + + public int[] getSqlLocations() { + return sqlLocations; + } + + public Type getExpectedType() { + return expectedType; + } + } +} diff --git a/src/org/hibernate/hql/ast/ParseErrorHandler.java b/src/org/hibernate/hql/ast/ParseErrorHandler.java new file mode 100644 index 0000000000..8afb3e9aee --- /dev/null +++ b/src/org/hibernate/hql/ast/ParseErrorHandler.java @@ -0,0 +1,21 @@ +// $Id$ + +package org.hibernate.hql.ast; + +import org.hibernate.QueryException; + + +/** + * Defines the behavior of an error handler for the HQL parsers. 
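+ * (added note: implementations are expected to accumulate errors as parsing proceeds, report + * the tally via getErrorCount(), and surface them through throwQueryException() once the + * parse completes)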
+ * User: josh + * Date: Dec 6, 2003 + * Time: 12:20:43 PM + */ +public interface ParseErrorHandler extends ErrorReporter { + + int getErrorCount(); + + // --Commented out by Inspection (12/11/04 10:56 AM): int getWarningCount(); + + void throwQueryException() throws QueryException; +} diff --git a/src/org/hibernate/hql/ast/QuerySyntaxException.java b/src/org/hibernate/hql/ast/QuerySyntaxException.java new file mode 100644 index 0000000000..e6297e981f --- /dev/null +++ b/src/org/hibernate/hql/ast/QuerySyntaxException.java @@ -0,0 +1,34 @@ +// $Id$ +package org.hibernate.hql.ast; + +import antlr.RecognitionException; +import org.hibernate.QueryException; + +/** + * Exception thrown when there is a syntax error in the HQL. + * + * @author josh + */ +public class QuerySyntaxException extends QueryException { + + public QuerySyntaxException(String message) { + super( message ); + } + + public QuerySyntaxException(String message, String hql) { + this( message ); + setQueryString( hql ); + } + + public static QuerySyntaxException convert(RecognitionException e) { + return convert( e, null ); + } + + public static QuerySyntaxException convert(RecognitionException e, String hql) { + String positionInfo = e.getLine() > 0 && e.getColumn() > 0 + ? " near line " + e.getLine() + ", column " + e.getColumn() + : ""; + return new QuerySyntaxException( e.getMessage() + positionInfo, hql ); + } + +} diff --git a/src/org/hibernate/hql/ast/QueryTranslatorImpl.java b/src/org/hibernate/hql/ast/QueryTranslatorImpl.java new file mode 100644 index 0000000000..5c027a95d4 --- /dev/null +++ b/src/org/hibernate/hql/ast/QueryTranslatorImpl.java @@ -0,0 +1,577 @@ +// $Id$ +package org.hibernate.hql.ast; + +import antlr.ANTLRException; +import antlr.RecognitionException; +import antlr.TokenStreamException; +import antlr.collections.AST; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.ScrollableResults; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.RowSelection; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.event.EventSource; +import org.hibernate.hql.FilterTranslator; +import org.hibernate.hql.QueryExecutionRequestException; +import org.hibernate.hql.ParameterTranslations; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.antlr.HqlTokenTypes; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.exec.BasicExecutor; +import org.hibernate.hql.ast.exec.MultiTableDeleteExecutor; +import org.hibernate.hql.ast.exec.MultiTableUpdateExecutor; +import org.hibernate.hql.ast.exec.StatementExecutor; +import org.hibernate.hql.ast.tree.FromElement; +import org.hibernate.hql.ast.tree.InsertStatement; +import org.hibernate.hql.ast.tree.QueryNode; +import org.hibernate.hql.ast.tree.Statement; +import org.hibernate.hql.ast.util.ASTPrinter; +import org.hibernate.hql.ast.util.NodeTraverser; +import org.hibernate.hql.ast.util.ASTUtil; +import org.hibernate.loader.hql.QueryLoader; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.type.Type; +import org.hibernate.util.IdentitySet; +import org.hibernate.util.StringHelper; +import org.hibernate.util.ReflectHelper; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; 
+import java.util.ArrayList; + +/** + * A QueryTranslator that uses an Antlr-based parser. + * + * @author Joshua Davis (pgmjsd@sourceforge.net) + */ +public class QueryTranslatorImpl implements FilterTranslator { + + private static final Log log = LogFactory.getLog( QueryTranslatorImpl.class ); + private static final Log AST_LOG = LogFactory.getLog( "org.hibernate.hql.ast.AST" ); + + private SessionFactoryImplementor factory; + + private final String queryIdentifier; + private String hql; + private boolean shallowQuery; + private Map tokenReplacements; + + private Map enabledFilters; //TODO:this is only needed during compilation .. can we eliminate the instvar? + + private boolean compiled; + private QueryLoader queryLoader; + private StatementExecutor statementExecutor; + + private Statement sqlAst; + private String sql; + + private ParameterTranslations paramTranslations; + + /** + * Creates a new AST-based query translator. + * + * @param queryIdentifier The query-identifier (used in stats collection) + * @param query The hql query to translate + * @param enabledFilters Currently enabled filters + * @param factory The session factory constructing this translator instance. + */ + public QueryTranslatorImpl( + String queryIdentifier, + String query, + Map enabledFilters, + SessionFactoryImplementor factory) { + this.queryIdentifier = queryIdentifier; + this.hql = query; + this.compiled = false; + this.shallowQuery = false; + this.enabledFilters = enabledFilters; + this.factory = factory; + } + + /** + * Compile a "normal" query. This method may be called multiple + * times. Subsequent invocations are no-ops. + * + * @param replacements Defined query substitutions. + * @param shallow Does this represent a shallow (scalar or entity-id) select? + * @throws QueryException There was a problem parsing the query string. + * @throws MappingException There was a problem querying defined mappings. + */ + public void compile( + Map replacements, + boolean shallow) throws QueryException, MappingException { + doCompile( replacements, shallow, null ); + } + + /** + * Compile a filter. This method may be called multiple + * times. Subsequent invocations are no-ops. + * + * @param collectionRole the role name of the collection used as the basis for the filter. + * @param replacements Defined query substitutions. + * @param shallow Does this represent a shallow (scalar or entity-id) select? + * @throws QueryException There was a problem parsing the query string. + * @throws MappingException There was a problem querying defined mappings. + */ + public void compile( + String collectionRole, + Map replacements, + boolean shallow) throws QueryException, MappingException { + doCompile( replacements, shallow, collectionRole ); + } + + /** + * Performs both filter and non-filter compiling. + * + * @param replacements Defined query substitutions. + * @param shallow Does this represent a shallow (scalar or entity-id) select? + * @param collectionRole the role name of the collection used as the basis for the filter, NULL if this + * is not a filter. + */ + private synchronized void doCompile(Map replacements, boolean shallow, String collectionRole) { + // If the query is already compiled, skip the compilation. + if ( compiled ) { + if ( log.isDebugEnabled() ) { + log.debug( "compile() : The query is already compiled, skipping..." ); + } + return; + } + + // Remember the parameters for the compilation. 
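+ // (added note: a null replacement map is normalized to an empty map just below, so later + // token-replacement lookups never need a null guard)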
+ this.tokenReplacements = replacements; + if ( tokenReplacements == null ) { + tokenReplacements = new HashMap(); + } + this.shallowQuery = shallow; + + try { + // PHASE 1 : Parse the HQL into an AST. + HqlParser parser = parse( true ); + + // PHASE 2 : Analyze the HQL AST, and produce an SQL AST. + HqlSqlWalker w = analyze( parser, collectionRole ); + + sqlAst = ( Statement ) w.getAST(); + + // at some point the generate phase needs to be moved out of here, + // because a single object-level DML might spawn multiple SQL DML + // command executions. + // + // Possible to just move the sql generation for dml stuff, but for + // consistency-sake probably best to just move responsibility for + // the generation phase completely into the delegates + // (QueryLoader/StatementExecutor) themselves. Also, not sure why + // QueryLoader currently even has a dependency on this at all; does + // it need it? Ideally I'd like to see the walker itself given to the delegates directly... + + if ( sqlAst.needsExecutor() ) { + statementExecutor = buildAppropriateStatementExecutor( w ); + } + else { + // PHASE 3 : Generate the SQL. + generate( ( QueryNode ) sqlAst ); + queryLoader = new QueryLoader( this, factory, w.getSelectClause() ); + } + + compiled = true; + } + catch ( QueryException qe ) { + qe.setQueryString( hql ); + throw qe; + } + catch ( RecognitionException e ) { + // we do not actually propagate ANTLRExceptions as a cause, so + // log it here for diagnostic purposes + if ( log.isTraceEnabled() ) { + log.trace( "converted antlr.RecognitionException", e ); + } + throw QuerySyntaxException.convert( e, hql ); + } + catch ( ANTLRException e ) { + // we do not actually propagate ANTLRExceptions as a cause, so + // log it here for diagnostic purposes + if ( log.isTraceEnabled() ) { + log.trace( "converted antlr.ANTLRException", e ); + } + throw new QueryException( e.getMessage(), hql ); + } + + this.enabledFilters = null; //only needed during compilation phase... + } + + private void generate(AST sqlAst) throws QueryException, RecognitionException { + if ( sql == null ) { + SqlGenerator gen = new SqlGenerator(factory); + gen.statement( sqlAst ); + sql = gen.getSQL(); + if ( log.isDebugEnabled() ) { + log.debug( "HQL: " + hql ); + log.debug( "SQL: " + sql ); + } + gen.getParseErrorHandler().throwQueryException(); + } + } + + private HqlSqlWalker analyze(HqlParser parser, String collectionRole) throws QueryException, RecognitionException { + HqlSqlWalker w = new HqlSqlWalker( this, factory, parser, tokenReplacements, collectionRole ); + AST hqlAst = parser.getAST(); + + // Transform the tree. + w.statement( hqlAst ); + + if ( AST_LOG.isDebugEnabled() ) { + ASTPrinter printer = new ASTPrinter( SqlTokenTypes.class ); + AST_LOG.debug( printer.showAsString( w.getAST(), "--- SQL AST ---" ) ); + } + + w.getParseErrorHandler().throwQueryException(); + + return w; + } + + private HqlParser parse(boolean filter) throws TokenStreamException, RecognitionException { + // Parse the query string into an HQL AST. 
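+ // (added outline, not in the original: obtain a parser instance, pass along the filter flag, + // let statement() build the AST, fold fully-qualified java constants via JavaConstantConverter, + // then rethrow any accumulated parse errors)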
+ HqlParser parser = HqlParser.getInstance( hql ); + parser.setFilter( filter ); + + if ( log.isDebugEnabled() ) { + log.debug( "parse() - HQL: " + hql ); + } + parser.statement(); + + AST hqlAst = parser.getAST(); + + JavaConstantConverter converter = new JavaConstantConverter(); + NodeTraverser walker = new NodeTraverser( converter ); + walker.traverseDepthFirst( hqlAst ); + + showHqlAst( hqlAst ); + + parser.getParseErrorHandler().throwQueryException(); + return parser; + } + + void showHqlAst(AST hqlAst) { + if ( AST_LOG.isDebugEnabled() ) { + ASTPrinter printer = new ASTPrinter( HqlTokenTypes.class ); + printer.setShowClassNames( false ); // The class names aren't interesting in the first tree. + AST_LOG.debug( printer.showAsString( hqlAst, "--- HQL AST ---" ) ); + } + } + + private void errorIfDML() throws HibernateException { + if ( sqlAst.needsExecutor() ) { + throw new QueryExecutionRequestException( "Not supported for DML operations", hql ); + } + } + + private void errorIfSelect() throws HibernateException { + if ( !sqlAst.needsExecutor() ) { + throw new QueryExecutionRequestException( "Not supported for select queries", hql ); + } + } + + public String getQueryIdentifier() { + return queryIdentifier; + } + + public Statement getSqlAST() { + return sqlAst; + } + + private HqlSqlWalker getWalker() { + return sqlAst.getWalker(); + } + + /** + * Types of the return values of an iterate() style query. + * + * @return an array of Types. + */ + public Type[] getReturnTypes() { + errorIfDML(); + return getWalker().getReturnTypes(); + } + + public String[] getReturnAliases() { + errorIfDML(); + return getWalker().getReturnAliases(); + } + + public String[][] getColumnNames() { + errorIfDML(); + return getWalker().getSelectClause().getColumnNames(); + } + + public Set getQuerySpaces() { + return getWalker().getQuerySpaces(); + } + + public List list(SessionImplementor session, QueryParameters queryParameters) + throws HibernateException { + // Delegate to the QueryLoader... + errorIfDML(); + QueryNode query = ( QueryNode ) sqlAst; + boolean hasLimit = queryParameters.getRowSelection() != null && queryParameters.getRowSelection().definesLimits(); + boolean needsDistincting = ( query.getSelectClause().isDistinct() || hasLimit ) && containsCollectionFetches(); + + QueryParameters queryParametersToUse; + if ( hasLimit && containsCollectionFetches() ) { + log.warn( "firstResult/maxResults specified with collection fetch; applying in memory!" ); + RowSelection selection = new RowSelection(); + selection.setFetchSize( queryParameters.getRowSelection().getFetchSize() ); + selection.setTimeout( queryParameters.getRowSelection().getTimeout() ); + queryParametersToUse = queryParameters.createCopyUsing( selection ); + } + else { + queryParametersToUse = queryParameters; + } + + List results = queryLoader.list( session, queryParametersToUse ); + + if ( needsDistincting ) { + int includedCount = -1; + // NOTE : firstRow is zero-based + int first = !hasLimit || queryParameters.getRowSelection().getFirstRow() == null + ? 0 + : queryParameters.getRowSelection().getFirstRow().intValue(); + int max = !hasLimit || queryParameters.getRowSelection().getMaxRows() == null + ? 
-1 + : queryParameters.getRowSelection().getMaxRows().intValue(); + int size = results.size(); + List tmp = new ArrayList(); + IdentitySet distinction = new IdentitySet(); + for ( int i = 0; i < size; i++ ) { + final Object result = results.get( i ); + if ( !distinction.add( result ) ) { + continue; + } + includedCount++; + if ( includedCount < first ) { + continue; + } + tmp.add( result ); + // NOTE : ( max - 1 ) because first is zero-based while max is not... + if ( max >= 0 && ( includedCount - first ) >= ( max - 1 ) ) { + break; + } + } + results = tmp; + } + + return results; + } + + /** + * Return the query results as an iterator + */ + public Iterator iterate(QueryParameters queryParameters, EventSource session) + throws HibernateException { + // Delegate to the QueryLoader... + errorIfDML(); + return queryLoader.iterate( queryParameters, session ); + } + + /** + * Return the query results, as an instance of ScrollableResults + */ + public ScrollableResults scroll(QueryParameters queryParameters, SessionImplementor session) + throws HibernateException { + // Delegate to the QueryLoader... + errorIfDML(); + return queryLoader.scroll( queryParameters, session ); + } + + public int executeUpdate(QueryParameters queryParameters, SessionImplementor session) + throws HibernateException { + errorIfSelect(); + return statementExecutor.execute( queryParameters, session ); + } + + /** + * The SQL query string to be called; implemented by all subclasses + */ + public String getSQLString() { + return sql; + } + + public List collectSqlStrings() { + ArrayList list = new ArrayList(); + if ( isManipulationStatement() ) { + String[] sqlStatements = statementExecutor.getSqlStatements(); + for ( int i = 0; i < sqlStatements.length; i++ ) { + list.add( sqlStatements[i] ); + } + } + else { + list.add( sql ); + } + return list; + } + + // -- Package local methods for the QueryLoader delegate -- + + public boolean isShallowQuery() { + return shallowQuery; + } + + public String getQueryString() { + return hql; + } + + public Map getEnabledFilters() { + return enabledFilters; + } + + public int[] getNamedParameterLocs(String name) { + return getWalker().getNamedParameterLocations( name ); + } + + public boolean containsCollectionFetches() { + errorIfDML(); + List collectionFetches = ( ( QueryNode ) sqlAst ).getFromClause().getCollectionFetches(); + return collectionFetches != null && collectionFetches.size() > 0; + } + + public boolean isManipulationStatement() { + return sqlAst.needsExecutor(); + } + + public void validateScrollability() throws HibernateException { + // Impl Note: allows multiple collection fetches as long as the + // entire fetched graph still "points back" to a single + // root entity for return + + errorIfDML(); + + QueryNode query = ( QueryNode ) sqlAst; + + // If there are no collection fetches, then no further checks are needed + List collectionFetches = query.getFromClause().getCollectionFetches(); + if ( collectionFetches.isEmpty() ) { + return; + } + + // A shallow query is ok (although technically there should be no fetching here...) + if ( isShallowQuery() ) { + return; + } + + // Otherwise, we have a non-scalar select with defined collection fetch(es). 
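+ // (added note: the fetch join duplicates root rows in the JDBC result, and folding those + // duplicates back into entities while scrolling is only well-defined for a single root, + // hence the checks below)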
+ // Make sure that there is only a single root entity in the return (no tuples) + if ( getReturnTypes().length > 1 ) { + throw new HibernateException( "cannot scroll with collection fetches and returned tuples" ); + } + + FromElement owner = null; + Iterator itr = query.getSelectClause().getFromElementsForLoad().iterator(); + while ( itr.hasNext() ) { + // should be the first, but just to be safe... + final FromElement fromElement = ( FromElement ) itr.next(); + if ( fromElement.getOrigin() == null ) { + owner = fromElement; + break; + } + } + + if ( owner == null ) { + throw new HibernateException( "unable to locate collection fetch(es) owner for scrollability checks" ); + } + + // This is not strictly true. We actually just need to make sure that + // it is ordered by root-entity PK and that that order-by comes before + // any non-root-entity ordering... + + AST primaryOrdering = query.getOrderByClause().getFirstChild(); + if ( primaryOrdering != null ) { + // TODO : this is a bit dodgy, come up with a better way to check this (plus see above comment) + String [] idColNames = owner.getQueryable().getIdentifierColumnNames(); + String expectedPrimaryOrderSeq = StringHelper.join( + ", ", + StringHelper.qualify( owner.getTableAlias(), idColNames ) + ); + if ( !primaryOrdering.getText().startsWith( expectedPrimaryOrderSeq ) ) { + throw new HibernateException( "cannot scroll results with collection fetches which are not ordered primarily by the root entity's PK" ); + } + } + } + + private StatementExecutor buildAppropriateStatementExecutor(HqlSqlWalker walker) { + Statement statement = ( Statement ) walker.getAST(); + if ( walker.getStatementType() == HqlSqlTokenTypes.DELETE ) { + FromElement fromElement = walker.getFinalFromClause().getFromElement(); + Queryable persister = fromElement.getQueryable(); + if ( persister.isMultiTable() ) { + return new MultiTableDeleteExecutor( walker ); + } + else { + return new BasicExecutor( walker, persister ); + } + } + else if ( walker.getStatementType() == HqlSqlTokenTypes.UPDATE ) { + FromElement fromElement = walker.getFinalFromClause().getFromElement(); + Queryable persister = fromElement.getQueryable(); + if ( persister.isMultiTable() ) { + // even here, if only properties mapped to the "base table" are referenced + // in the set and where clauses, this could be handled by the BasicDelegate. + // TODO : decide if it is better performance-wise to perform that check, or to simply use the MultiTableUpdateDelegate + return new MultiTableUpdateExecutor( walker ); + } + else { + return new BasicExecutor( walker, persister ); + } + } + else if ( walker.getStatementType() == HqlSqlTokenTypes.INSERT ) { + return new BasicExecutor( walker, ( ( InsertStatement ) statement ).getIntoClause().getQueryable() ); + } + else { + throw new QueryException( "Unexpected statement type" ); + } + } + + public ParameterTranslations getParameterTranslations() { + if ( paramTranslations == null ) { + paramTranslations = new ParameterTranslationsImpl( getWalker().getParameters() ); + } + return paramTranslations; + } + + public static class JavaConstantConverter implements NodeTraverser.VisitationStrategy { + private AST dotRoot; + public void visit(AST node) { + if ( dotRoot != null ) { + // we are already processing a dot-structure + if ( ASTUtil.isSubtreeChild( dotRoot, node ) ) { + // ignore it... 
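+ // (added note: every node underneath an already-detected dot-root is part of the same + // qualified path, so only the root of each dot-structure needs conversion)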
+ return; + } + else { + // we are now at a new tree level + dotRoot = null; + } + } + + if ( dotRoot == null && node.getType() == HqlTokenTypes.DOT ) { + dotRoot = node; + handleDotStructure( dotRoot ); + } + } + private void handleDotStructure(AST dotStructureRoot) { + String expression = ASTUtil.getPathText( dotStructureRoot ); + Object constant = ReflectHelper.getConstantValue( expression ); + if ( constant != null ) { + dotStructureRoot.setFirstChild( null ); + dotStructureRoot.setType( HqlTokenTypes.JAVA_CONSTANT ); + dotStructureRoot.setText( expression ); + } + } + } +} diff --git a/src/org/hibernate/hql/ast/SqlASTFactory.java b/src/org/hibernate/hql/ast/SqlASTFactory.java new file mode 100644 index 0000000000..e97071f227 --- /dev/null +++ b/src/org/hibernate/hql/ast/SqlASTFactory.java @@ -0,0 +1,221 @@ +// $Id$ +package org.hibernate.hql.ast; + +import antlr.ASTFactory; +import antlr.Token; +import antlr.collections.AST; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.ast.tree.AggregateNode; +import org.hibernate.hql.ast.tree.BinaryArithmeticOperatorNode; +import org.hibernate.hql.ast.tree.BinaryLogicOperatorNode; +import org.hibernate.hql.ast.tree.Case2Node; +import org.hibernate.hql.ast.tree.CaseNode; +import org.hibernate.hql.ast.tree.CollectionFunction; +import org.hibernate.hql.ast.tree.ConstructorNode; +import org.hibernate.hql.ast.tree.CountNode; +import org.hibernate.hql.ast.tree.DeleteStatement; +import org.hibernate.hql.ast.tree.DotNode; +import org.hibernate.hql.ast.tree.FromClause; +import org.hibernate.hql.ast.tree.FromElement; +import org.hibernate.hql.ast.tree.IdentNode; +import org.hibernate.hql.ast.tree.ImpliedFromElement; +import org.hibernate.hql.ast.tree.IndexNode; +import org.hibernate.hql.ast.tree.InitializeableNode; +import org.hibernate.hql.ast.tree.InsertStatement; +import org.hibernate.hql.ast.tree.IntoClause; +import org.hibernate.hql.ast.tree.LiteralNode; +import org.hibernate.hql.ast.tree.MethodNode; +import org.hibernate.hql.ast.tree.OrderByClause; +import org.hibernate.hql.ast.tree.ParameterNode; +import org.hibernate.hql.ast.tree.QueryNode; +import org.hibernate.hql.ast.tree.SelectClause; +import org.hibernate.hql.ast.tree.SelectExpressionImpl; +import org.hibernate.hql.ast.tree.SqlFragment; +import org.hibernate.hql.ast.tree.SqlNode; +import org.hibernate.hql.ast.tree.UnaryArithmeticNode; +import org.hibernate.hql.ast.tree.UpdateStatement; +import org.hibernate.hql.ast.tree.BetweenOperatorNode; +import org.hibernate.hql.ast.tree.UnaryLogicOperatorNode; +import org.hibernate.hql.ast.tree.InLogicOperatorNode; +import org.hibernate.hql.ast.tree.JavaConstantNode; +import org.hibernate.hql.ast.tree.SessionFactoryAwareNode; +import org.hibernate.hql.ast.tree.BooleanLiteralNode; + +import java.lang.reflect.Constructor; + +/** + * Custom AST factory for the intermediate tree that causes ANTLR to create specialized + * AST nodes, given the AST node type (from HqlSqlTokenTypes). HqlSqlWalker registers + * this factory with itself when it is initialized. + * + * @author Joshua + */ +public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes { + private HqlSqlWalker walker; + + /** + * Create factory with a specific mapping from token type + * to Java AST node type. Your subclasses of ASTFactory + * can override and reuse the map stuff. + */ + public SqlASTFactory(HqlSqlWalker walker) { + super(); + this.walker = walker; + } + + /** + * Returns the class for a given token type (a.k.a. AST node type). 
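+ * (illustrative examples, not in the original javadoc: SELECT and QUERY tokens map to + * QueryNode, DOT maps to DotNode, and unmatched token types fall through to SqlNode)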
+ * + * @param tokenType The token type. + * @return Class - The AST node class to instantiate. + */ + public Class getASTNodeType(int tokenType) { + switch ( tokenType ) { + case SELECT: + case QUERY: + return QueryNode.class; + case UPDATE: + return UpdateStatement.class; + case DELETE: + return DeleteStatement.class; + case INSERT: + return InsertStatement.class; + case INTO: + return IntoClause.class; + case FROM: + return FromClause.class; + case FROM_FRAGMENT: + return FromElement.class; + case IMPLIED_FROM: + return ImpliedFromElement.class; + case DOT: + return DotNode.class; + case INDEX_OP: + return IndexNode.class; + // Alias references and identifiers use the same node class. + case ALIAS_REF: + case IDENT: + return IdentNode.class; + case SQL_TOKEN: + return SqlFragment.class; + case METHOD_CALL: + return MethodNode.class; + case ELEMENTS: + case INDICES: + return CollectionFunction.class; + case SELECT_CLAUSE: + return SelectClause.class; + case SELECT_EXPR: + return SelectExpressionImpl.class; + case AGGREGATE: + return AggregateNode.class; + case COUNT: + return CountNode.class; + case CONSTRUCTOR: + return ConstructorNode.class; + case NUM_INT: + case NUM_FLOAT: + case NUM_LONG: + case NUM_DOUBLE: + case QUOTED_STRING: + return LiteralNode.class; + case TRUE: + case FALSE: + return BooleanLiteralNode.class; + case JAVA_CONSTANT: + return JavaConstantNode.class; + case ORDER: + return OrderByClause.class; + case PLUS: + case MINUS: + case STAR: + case DIV: + return BinaryArithmeticOperatorNode.class; + case UNARY_MINUS: + case UNARY_PLUS: + return UnaryArithmeticNode.class; + case CASE2: + return Case2Node.class; + case CASE: + return CaseNode.class; + case PARAM: + case NAMED_PARAM: + return ParameterNode.class; + case EQ: + case NE: + case LT: + case GT: + case LE: + case GE: + case LIKE: + case NOT_LIKE: + return BinaryLogicOperatorNode.class; + case IN: + case NOT_IN: + return InLogicOperatorNode.class; + case BETWEEN: + case NOT_BETWEEN: + return BetweenOperatorNode.class; + case IS_NULL: + case IS_NOT_NULL: + case EXISTS: + return UnaryLogicOperatorNode.class; + default: + return SqlNode.class; + } // switch + } + + protected AST createUsingCtor(Token token, String className) { + Class c; + AST t; + try { + c = Class.forName( className ); + Class[] tokenArgType = new Class[]{antlr.Token.class}; + Constructor ctor = c.getConstructor( tokenArgType ); + if ( ctor != null ) { + t = ( AST ) ctor.newInstance( new Object[]{token} ); // make a new one + initializeSqlNode( t ); + } + else { + // just do the regular thing if you can't find the ctor + // Your AST must have default ctor to use this. + t = create( c ); + } + } + catch ( Exception e ) { + throw new IllegalArgumentException( "Invalid class or can't make instance, " + className ); + } + return t; + } + + private void initializeSqlNode(AST t) { + // Initialize SQL nodes here. + if ( t instanceof InitializeableNode ) { + InitializeableNode initializeableNode = ( InitializeableNode ) t; + initializeableNode.initialize( walker ); + } + if ( t instanceof SessionFactoryAwareNode ) { + ( ( SessionFactoryAwareNode ) t ).setSessionFactory( walker.getSessionFactoryHelper().getFactory() ); + } + } + + /** + * Actually instantiate the AST node. + * + * @param c The class to instantiate. + * @return The instantiated and initialized node. 
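+ * (added note: "initialized" here means initializeSqlNode() wires the node to the walker when + * it implements InitializeableNode and to the session factory when it implements + * SessionFactoryAwareNode)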
+ */ + protected AST create(Class c) { + AST t; + try { + t = ( AST ) c.newInstance(); // make a new one + initializeSqlNode( t ); + } + catch ( Exception e ) { + error( "Can't create AST Node " + c.getName() ); + return null; + } + return t; + } + +} diff --git a/src/org/hibernate/hql/ast/SqlGenerator.java b/src/org/hibernate/hql/ast/SqlGenerator.java new file mode 100644 index 0000000000..7adc781075 --- /dev/null +++ b/src/org/hibernate/hql/ast/SqlGenerator.java @@ -0,0 +1,267 @@ +// $Id$ +package org.hibernate.hql.ast; + +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; + +import antlr.RecognitionException; +import antlr.collections.AST; +import org.hibernate.QueryException; +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.hql.antlr.SqlGeneratorBase; +import org.hibernate.hql.ast.tree.MethodNode; +import org.hibernate.hql.ast.tree.FromElement; +import org.hibernate.hql.ast.tree.Node; + +/** + * Generates SQL by overriding callback methods in the base class, which does + * the actual SQL AST walking. + * + * @author Joshua Davis + * @author Steve Ebersole + */ +public class SqlGenerator extends SqlGeneratorBase implements ErrorReporter { + /** + * Handles parser errors. + */ + private ParseErrorHandler parseErrorHandler; + + /** + * all append invocations on the buf should go through this Output instance variable. + * The value of this variable may be temporarily substitued by sql function processing code + * to catch generated arguments. + * This is because sql function templates need arguments as seperate string chunks + * that will be assembled into the target dialect-specific function call. + */ + private SqlWriter writer = new DefaultWriter(); + + private SessionFactoryImplementor sessionFactory; + + private LinkedList outputStack = new LinkedList(); + + protected void out(String s) { + writer.clause( s ); + } + + protected void out(AST n) { + if ( n instanceof Node ) { + out( ( ( Node ) n ).getRenderText( sessionFactory ) ); + } + else { + super.out( n ); + } + } + + protected void commaBetweenParameters(String comma) { + writer.commaBetweenParameters( comma ); + } + + public void reportError(RecognitionException e) { + parseErrorHandler.reportError( e ); // Use the delegate. + } + + public void reportError(String s) { + parseErrorHandler.reportError( s ); // Use the delegate. 
+ } + + public void reportWarning(String s) { + parseErrorHandler.reportWarning( s ); + } + + public ParseErrorHandler getParseErrorHandler() { + return parseErrorHandler; + } + + public SqlGenerator(SessionFactoryImplementor sfi) { + super(); + parseErrorHandler = new ErrorCounter(); + sessionFactory = sfi; + } + + public String getSQL() { + return getStringBuffer().toString(); + } + + protected void optionalSpace() { + int c = getLastChar(); + switch ( c ) { + case -1: + return; + case ' ': + return; + case ')': + return; + case '(': + return; + default: + out( " " ); + } + } + + protected void beginFunctionTemplate(AST m, AST i) { + MethodNode methodNode = ( MethodNode ) m; + SQLFunction template = methodNode.getSQLFunction(); + if ( template == null ) { + // if template is null we just write the function out as it appears in the hql statement + super.beginFunctionTemplate( m, i ); + } + else { + // this function has a template -> redirect output and catch the arguments + outputStack.addFirst( writer ); + writer = new FunctionArguments(); + } + } + + protected void endFunctionTemplate(AST m) { + MethodNode methodNode = ( MethodNode ) m; + SQLFunction template = methodNode.getSQLFunction(); + if ( template == null ) { + super.endFunctionTemplate( m ); + } + else { + // this function has a template -> restore output, apply the template and write the result out + FunctionArguments functionArguments = ( FunctionArguments ) writer; // TODO: Downcast to avoid using an interface? Yuck. + writer = ( SqlWriter ) outputStack.removeFirst(); + out( template.render( functionArguments.getArgs(), sessionFactory ) ); + } + } + + // --- Inner classes (moved here from sql-gen.g) --- + + /** + * Writes SQL fragments. + */ + interface SqlWriter { + void clause(String clause); + + /** + * todo remove this hack + * The parameter is either ", " or " , ". This is needed to pass sql generating tests as the old + * sql generator uses " , " in the WHERE and ", " in SELECT. + * + * @param comma either " , " or ", " + */ + void commaBetweenParameters(String comma); + } + + /** + * SQL function processing code redirects generated SQL output to an instance of this class + * which catches function arguments. + */ + class FunctionArguments implements SqlWriter { + private int argInd; + private final List args = new ArrayList( 3 ); + + public void clause(String clause) { + if ( argInd == args.size() ) { + args.add( clause ); + } + else { + args.set( argInd, args.get( argInd ) + clause ); + } + } + + public void commaBetweenParameters(String comma) { + ++argInd; + } + + public List getArgs() { + return args; + } + } + + /** + * The default SQL writer. + */ + class DefaultWriter implements SqlWriter { + public void clause(String clause) { + getStringBuffer().append( clause ); + } + + public void commaBetweenParameters(String comma) { + getStringBuffer().append( comma ); + } + } + + public static void panic() { + throw new QueryException( "TreeWalker: panic" ); + } + + protected void fromFragmentSeparator(AST a) { + // check two "adjecent" nodes at the top of the from-clause tree + AST next = a.getNextSibling(); + if ( next == null || !hasText( a ) ) { + return; + } + + FromElement left = ( FromElement ) a; + FromElement right = ( FromElement ) next; + + /////////////////////////////////////////////////////////////////////// + // HACK ALERT !!!!!!!!!!!!!!!!!!!!!!!!!!!! + // Attempt to work around "ghost" ImpliedFromElements that occasionally + // show up between the actual things being joined. 
This consistently + // occurs from index nodes (at least against many-to-many). Not sure + // if there are other conditions + // + // Essentially, look-ahead to the next FromElement that actually + // writes something to the SQL + while ( right != null && !hasText( right ) ) { + right = ( FromElement ) right.getNextSibling(); + } + if ( right == null ) { + return; + } + /////////////////////////////////////////////////////////////////////// + + if ( !hasText( right ) ) { + return; + } + + if ( right.getRealOrigin() == left || + ( right.getRealOrigin() != null && right.getRealOrigin() == left.getRealOrigin() ) ) { + // right represents a joins originating from left; or + // both right and left reprersent joins originating from the same FromElement + if ( right.getJoinSequence() != null && right.getJoinSequence().isThetaStyle() ) { + out( ", " ); + } + else { + out( " " ); + } + } + else { + // these are just two unrelated table references + out( ", " ); + } + } + + protected void nestedFromFragment(AST d, AST parent) { + // check a set of parent/child nodes in the from-clause tree + // to determine if a comma is required between them + if ( d != null && hasText( d ) ) { + if ( parent != null && hasText( parent ) ) { + // again, both should be FromElements + FromElement left = ( FromElement ) parent; + FromElement right = ( FromElement ) d; + if ( right.getRealOrigin() == left ) { + // right represents a joins originating from left... + if ( right.getJoinSequence() != null && right.getJoinSequence().isThetaStyle() ) { + out( ", " ); + } + else { + out( " " ); + } + } + else { + // not so sure this is even valid subtree. but if it was, it'd + // represent two unrelated table references... + out( ", " ); + } + } + out( d ); + } + } + +} diff --git a/src/org/hibernate/hql/ast/exec/AbstractStatementExecutor.java b/src/org/hibernate/hql/ast/exec/AbstractStatementExecutor.java new file mode 100644 index 0000000000..1be0071679 --- /dev/null +++ b/src/org/hibernate/hql/ast/exec/AbstractStatementExecutor.java @@ -0,0 +1,227 @@ +// $Id$ +package org.hibernate.hql.ast.exec; + +import java.sql.PreparedStatement; +import java.sql.Connection; +import java.sql.Statement; + +import org.hibernate.HibernateException; +import org.hibernate.action.BulkOperationCleanupAction; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.transaction.Isolater; +import org.hibernate.engine.transaction.IsolatedWork; +import org.hibernate.event.EventSource; +import org.hibernate.hql.ast.HqlSqlWalker; +import org.hibernate.hql.ast.SqlGenerator; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.InsertSelect; +import org.hibernate.sql.Select; +import org.hibernate.sql.SelectFragment; +import org.hibernate.util.StringHelper; + +import antlr.RecognitionException; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; + +/** + * Implementation of AbstractStatementExecutor. 
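+ *
+ * Centralizes the temporary-id-table strategy shared by the multi-table
+ * executors. As a sketch (illustrative SQL against a hypothetical Animal
+ * hierarchy; the table and HT_ names are assumptions, not literal output):
+ * <pre>
+ * insert into HT_animal (id) select a.id from animal a where a.weight > :w
+ * delete from mammal where (id) IN (select id from HT_animal)
+ * delete from animal where (id) IN (select id from HT_animal)
+ * </pre>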
+ * + * @author Steve Ebersole + */ +public abstract class AbstractStatementExecutor implements StatementExecutor { + + private final Log log; + private final HqlSqlWalker walker; + + public AbstractStatementExecutor(HqlSqlWalker walker, Log log) { + this.walker = walker; + this.log = log; + } + + protected HqlSqlWalker getWalker() { + return walker; + } + + protected SessionFactoryImplementor getFactory() { + return walker.getSessionFactoryHelper().getFactory(); + } + + protected abstract Queryable[] getAffectedQueryables(); + + protected String generateIdInsertSelect(Queryable persister, String tableAlias, AST whereClause) { + Select select = new Select( getFactory().getDialect() ); + SelectFragment selectFragment = new SelectFragment() + .addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() ); + select.setSelectClause( selectFragment.toFragmentString().substring( 2 ) ); + + String rootTableName = persister.getTableName(); + String fromJoinFragment = persister.fromJoinFragment( tableAlias, true, false ); + String whereJoinFragment = persister.whereJoinFragment( tableAlias, true, false ); + + select.setFromClause( rootTableName + ' ' + tableAlias + fromJoinFragment ); + + if ( whereJoinFragment == null ) { + whereJoinFragment = ""; + } + else { + whereJoinFragment = whereJoinFragment.trim(); + if ( whereJoinFragment.startsWith( "and" ) ) { + whereJoinFragment = whereJoinFragment.substring( 4 ); + } + } + + String userWhereClause = ""; + if ( whereClause.getNumberOfChildren() != 0 ) { + // If a where clause was specified in the update/delete query, use it to limit the + // returned ids here... + try { + SqlGenerator sqlGenerator = new SqlGenerator( getFactory() ); + sqlGenerator.whereClause( whereClause ); + userWhereClause = sqlGenerator.getSQL().substring( 7 ); // strip the " where " + } + catch ( RecognitionException e ) { + throw new HibernateException( "Unable to generate id select for DML operation", e ); + } + if ( whereJoinFragment.length() > 0 ) { + whereJoinFragment += " and "; + } + } + + select.setWhereClause( whereJoinFragment + userWhereClause ); + + InsertSelect insert = new InsertSelect( getFactory().getDialect() ); + if ( getFactory().getSettings().isCommentsEnabled() ) { + insert.setComment( "insert-select for " + persister.getEntityName() + " ids" ); + } + insert.setTableName( persister.getTemporaryIdTableName() ); + insert.setSelect( select ); + return insert.toStatementString(); + } + + protected String generateIdSubselect(Queryable persister) { + return "select " + StringHelper.join( ", ", persister.getIdentifierColumnNames() ) + + " from " + persister.getTemporaryIdTableName(); + } + + protected void createTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) { + // Don't really know all the codes required to adequately decipher returned jdbc exceptions here. 
+ // simply allow the failure to be eaten and the subsequent insert-selects/deletes should fail + IsolatedWork work = new IsolatedWork() { + public void doWork(Connection connection) throws HibernateException { + Statement stmnt = null; + try { + stmnt = connection.createStatement(); + stmnt.executeUpdate( persister.getTemporaryIdTableDDL() ); + } + catch( Throwable t ) { + log.debug( "unable to create temporary id table [" + t.getMessage() + "]" ); + } + finally { + if ( stmnt != null ) { + try { + stmnt.close(); + } + catch( Throwable ignore ) { + // ignore + } + } + } + } + }; + if ( shouldIsolateTemporaryTableDDL() ) { + if ( getFactory().getSettings().isDataDefinitionInTransactionSupported() ) { + Isolater.doIsolatedWork( work, session ); + } + else { + Isolater.doNonTransactedWork( work, session ); + } + } + else { + work.doWork( session.getJDBCContext().getConnectionManager().getConnection() ); + session.getJDBCContext().getConnectionManager().afterStatement(); + } + } + + protected void dropTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) { + if ( getFactory().getDialect().dropTemporaryTableAfterUse() ) { + IsolatedWork work = new IsolatedWork() { + public void doWork(Connection connection) throws HibernateException { + Statement stmnt = null; + try { + stmnt = connection.createStatement(); + stmnt.executeUpdate( "drop table " + persister.getTemporaryIdTableName() ); + } + catch( Throwable t ) { + log.warn( "unable to drop temporary id table after use [" + t.getMessage() + "]" ); + } + finally { + if ( stmnt != null ) { + try { + stmnt.close(); + } + catch( Throwable ignore ) { + // ignore + } + } + } + } + }; + + if ( shouldIsolateTemporaryTableDDL() ) { + if ( getFactory().getSettings().isDataDefinitionInTransactionSupported() ) { + Isolater.doIsolatedWork( work, session ); + } + else { + Isolater.doNonTransactedWork( work, session ); + } + } + else { + work.doWork( session.getJDBCContext().getConnectionManager().getConnection() ); + session.getJDBCContext().getConnectionManager().afterStatement(); + } + } + else { + // at the very least cleanup the data :) + PreparedStatement ps = null; + try { + ps = session.getBatcher().prepareStatement( "delete from " + persister.getTemporaryIdTableName() ); + ps.executeUpdate(); + } + catch( Throwable t ) { + log.warn( "unable to cleanup temporary id table after use [" + t + "]" ); + } + finally { + if ( ps != null ) { + try { + session.getBatcher().closeStatement( ps ); + } + catch( Throwable ignore ) { + // ignore + } + } + } + } + } + + protected void coordinateSharedCacheCleanup(SessionImplementor session) { + BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getAffectedQueryables() ); + + action.init(); + + if ( session.isEventSource() ) { + ( ( EventSource ) session ).getActionQueue().addAction( action ); + } + } + + protected boolean shouldIsolateTemporaryTableDDL() { + Boolean dialectVote = getFactory().getDialect().performTemporaryTableDDLInIsolation(); + if ( dialectVote != null ) { + return dialectVote.booleanValue(); + } + else { + return getFactory().getSettings().isDataDefinitionImplicitCommit(); + } + } +} diff --git a/src/org/hibernate/hql/ast/exec/BasicExecutor.java b/src/org/hibernate/hql/ast/exec/BasicExecutor.java new file mode 100644 index 0000000000..a3afbdf038 --- /dev/null +++ b/src/org/hibernate/hql/ast/exec/BasicExecutor.java @@ -0,0 +1,96 @@ +// $Id$ +package org.hibernate.hql.ast.exec; + +import java.sql.PreparedStatement; +import 
java.sql.SQLException; +import java.util.Iterator; + +import org.hibernate.HibernateException; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.RowSelection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.hql.ast.HqlSqlWalker; +import org.hibernate.hql.ast.QuerySyntaxException; +import org.hibernate.hql.ast.SqlGenerator; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.persister.entity.Queryable; + +import antlr.RecognitionException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Implementation of BasicExecutor. + * + * @author Steve Ebersole + */ +public class BasicExecutor extends AbstractStatementExecutor { + private static final Log log = LogFactory.getLog( BasicExecutor.class ); + + private final Queryable persister; + private final String sql; + + public BasicExecutor(HqlSqlWalker walker, Queryable persister) { + super( walker, log ); + this.persister = persister; + try { + SqlGenerator gen = new SqlGenerator( getFactory() ); + gen.statement( walker.getAST() ); + sql = gen.getSQL(); + gen.getParseErrorHandler().throwQueryException(); + } + catch ( RecognitionException e ) { + throw QuerySyntaxException.convert( e ); + } + } + + public String[] getSqlStatements() { + return new String[] { sql }; + } + + public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException { + + coordinateSharedCacheCleanup( session ); + + PreparedStatement st = null; + RowSelection selection = parameters.getRowSelection(); + + try { + try { + st = session.getBatcher().prepareStatement( sql ); + Iterator paramSpecifications = getWalker().getParameters().iterator(); + int pos = 1; + while ( paramSpecifications.hasNext() ) { + final ParameterSpecification paramSpec = ( ParameterSpecification ) paramSpecifications.next(); + pos += paramSpec.bind( st, parameters, session, pos ); + } + if ( selection != null ) { + if ( selection.getTimeout() != null ) { + st.setQueryTimeout( selection.getTimeout().intValue() ); + } + } + + return st.executeUpdate(); + } + finally { + if ( st != null ) { + session.getBatcher().closeStatement( st ); + } + } + } + catch( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + getFactory().getSQLExceptionConverter(), + sqle, + "could not execute update query", + sql + ); + } + } + + protected Queryable[] getAffectedQueryables() { + return new Queryable[] { persister }; + } +} diff --git a/src/org/hibernate/hql/ast/exec/MultiTableDeleteExecutor.java b/src/org/hibernate/hql/ast/exec/MultiTableDeleteExecutor.java new file mode 100644 index 0000000000..9c83539081 --- /dev/null +++ b/src/org/hibernate/hql/ast/exec/MultiTableDeleteExecutor.java @@ -0,0 +1,143 @@ +// $Id$ +package org.hibernate.hql.ast.exec; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Iterator; + +import org.hibernate.HibernateException; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.hql.ast.HqlSqlWalker; +import org.hibernate.hql.ast.tree.DeleteStatement; +import org.hibernate.hql.ast.tree.FromElement; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.Delete; +import org.hibernate.util.StringHelper; + +import org.apache.commons.logging.Log; +import 
org.apache.commons.logging.LogFactory; + +/** + * Implementation of MultiTableDeleteExecutor. + * + * @author Steve Ebersole + */ +public class MultiTableDeleteExecutor extends AbstractStatementExecutor { + private static final Log log = LogFactory.getLog( MultiTableDeleteExecutor.class ); + + private final Queryable persister; + private final String idInsertSelect; + private final String[] deletes; + + public MultiTableDeleteExecutor(HqlSqlWalker walker) { + super( walker, log ); + + if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) { + throw new HibernateException( "cannot perform multi-table deletes using dialect not supporting temp tables" ); + } + + DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST(); + FromElement fromElement = deleteStatement.getFromClause().getFromElement(); + String bulkTargetAlias = fromElement.getTableAlias(); + this.persister = fromElement.getQueryable(); + + this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, deleteStatement.getWhereClause() ); + log.trace( "Generated ID-INSERT-SELECT SQL (multi-table delete) : " + idInsertSelect ); + + String[] tableNames = persister.getConstraintOrderedTableNameClosure(); + String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure(); + String idSubselect = generateIdSubselect( persister ); + + deletes = new String[tableNames.length]; + for ( int i = tableNames.length - 1; i >= 0; i-- ) { + // TODO : an optimization here would be to consider cascade deletes and not gen those delete statements; + // the difficulty is the ordering of the tables here vs the cascade attributes on the persisters -> + // the table info gotten here should really be self-contained (i.e., a class representation + // defining all the needed attributes), then we could then get an array of those + final Delete delete = new Delete() + .setTableName( tableNames[i] ) + .setWhere( "(" + StringHelper.join( ", ", columnNames[i] ) + ") IN (" + idSubselect + ")" ); + if ( getFactory().getSettings().isCommentsEnabled() ) { + delete.setComment( "bulk delete" ); + } + + deletes[i] = delete.toStatementString(); + } + } + + public String[] getSqlStatements() { + return deletes; + } + + public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException { + coordinateSharedCacheCleanup( session ); + + createTemporaryTableIfNecessary( persister, session ); + + try { + // First, save off the pertinent ids, saving the number of pertinent ids for return + PreparedStatement ps = null; + int resultCount = 0; + try { + try { + ps = session.getBatcher().prepareStatement( idInsertSelect ); + Iterator paramSpecifications = getWalker().getParameters().iterator(); + int pos = 1; + while ( paramSpecifications.hasNext() ) { + final ParameterSpecification paramSpec = ( ParameterSpecification ) paramSpecifications.next(); + pos += paramSpec.bind( ps, parameters, session, pos ); + } + resultCount = ps.executeUpdate(); + } + finally { + if ( ps != null ) { + session.getBatcher().closeStatement( ps ); + } + } + } + catch( SQLException e ) { + throw JDBCExceptionHelper.convert( + getFactory().getSQLExceptionConverter(), + e, + "could not insert/select ids for bulk delete", + idInsertSelect + ); + } + + // Start performing the deletes + for ( int i = 0; i < deletes.length; i++ ) { + try { + try { + ps = session.getBatcher().prepareStatement( deletes[i] ); + ps.executeUpdate(); + } + finally { + if ( ps != null ) { + session.getBatcher().closeStatement( ps 
); + } + } + } + catch( SQLException e ) { + throw JDBCExceptionHelper.convert( + getFactory().getSQLExceptionConverter(), + e, + "error performing bulk delete", + deletes[i] + ); + } + } + + return resultCount; + } + finally { + dropTemporaryTableIfNecessary( persister, session ); + } + } + + protected Queryable[] getAffectedQueryables() { + return new Queryable[] { persister }; + } +} diff --git a/src/org/hibernate/hql/ast/exec/MultiTableUpdateExecutor.java b/src/org/hibernate/hql/ast/exec/MultiTableUpdateExecutor.java new file mode 100644 index 0000000000..63319ca9a7 --- /dev/null +++ b/src/org/hibernate/hql/ast/exec/MultiTableUpdateExecutor.java @@ -0,0 +1,177 @@ +// $Id$ +package org.hibernate.hql.ast.exec; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.hibernate.HibernateException; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.hql.ast.HqlSqlWalker; +import org.hibernate.hql.ast.tree.AssignmentSpecification; +import org.hibernate.hql.ast.tree.FromElement; +import org.hibernate.hql.ast.tree.UpdateStatement; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.Update; +import org.hibernate.util.StringHelper; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Implementation of MultiTableUpdateExecutor. + * + * @author Steve Ebersole + */ +public class MultiTableUpdateExecutor extends AbstractStatementExecutor { + private static final Log log = LogFactory.getLog( MultiTableUpdateExecutor.class ); + + private final Queryable persister; + private final String idInsertSelect; + private final String[] updates; + private final ParameterSpecification[][] hqlParameters; + + public MultiTableUpdateExecutor(HqlSqlWalker walker) { + super( walker, log ); + + if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) { + throw new HibernateException( "cannot perform multi-table updates using dialect not supporting temp tables" ); + } + + UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST(); + FromElement fromElement = updateStatement.getFromClause().getFromElement(); + String bulkTargetAlias = fromElement.getTableAlias(); + this.persister = fromElement.getQueryable(); + + this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, updateStatement.getWhereClause() ); + log.trace( "Generated ID-INSERT-SELECT SQL (multi-table update) : " + idInsertSelect ); + + String[] tableNames = persister.getConstraintOrderedTableNameClosure(); + String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure(); + + String idSubselect = generateIdSubselect( persister ); + List assignmentSpecifications = walker.getAssignmentSpecifications(); + + updates = new String[tableNames.length]; + hqlParameters = new ParameterSpecification[tableNames.length][]; + for ( int tableIndex = 0; tableIndex < tableNames.length; tableIndex++ ) { + boolean affected = false; + List parameterList = new ArrayList(); + Update update = new Update( getFactory().getDialect() ) + .setTableName( tableNames[tableIndex] ) + .setWhere( "(" + StringHelper.join( ", ", columnNames[tableIndex] ) + ") IN (" + idSubselect + ")" ); + if ( getFactory().getSettings().isCommentsEnabled() ) { + update.setComment( "bulk 
update" ); + } + final Iterator itr = assignmentSpecifications.iterator(); + while ( itr.hasNext() ) { + final AssignmentSpecification specification = ( AssignmentSpecification ) itr.next(); + if ( specification.affectsTable( tableNames[tableIndex] ) ) { + affected = true; + update.appendAssignmentFragment( specification.getSqlAssignmentFragment() ); + if ( specification.getParameters() != null ) { + for ( int paramIndex = 0; paramIndex < specification.getParameters().length; paramIndex++ ) { + parameterList.add( specification.getParameters()[paramIndex] ); + } + } + } + } + if ( affected ) { + updates[tableIndex] = update.toStatementString(); + hqlParameters[tableIndex] = ( ParameterSpecification[] ) parameterList.toArray( new ParameterSpecification[0] ); + } + } + } + + public Queryable getAffectedQueryable() { + return persister; + } + + public String[] getSqlStatements() { + return updates; + } + + public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException { + coordinateSharedCacheCleanup( session ); + + createTemporaryTableIfNecessary( persister, session ); + + try { + // First, save off the pertinent ids, as the return value + PreparedStatement ps = null; + int resultCount = 0; + try { + try { + ps = session.getBatcher().prepareStatement( idInsertSelect ); + int parameterStart = getWalker().getNumberOfParametersInSetClause(); + List allParams = getWalker().getParameters(); + Iterator whereParams = allParams.subList( parameterStart, allParams.size() ).iterator(); + int sum = 1; // jdbc params are 1-based + while ( whereParams.hasNext() ) { + sum += ( ( ParameterSpecification ) whereParams.next() ).bind( ps, parameters, session, sum ); + } + resultCount = ps.executeUpdate(); + } + finally { + if ( ps != null ) { + session.getBatcher().closeStatement( ps ); + } + } + } + catch( SQLException e ) { + throw JDBCExceptionHelper.convert( + getFactory().getSQLExceptionConverter(), + e, + "could not insert/select ids for bulk update", + idInsertSelect + ); + } + + // Start performing the updates + for ( int i = 0; i < updates.length; i++ ) { + if ( updates[i] == null ) { + continue; + } + try { + try { + ps = session.getBatcher().prepareStatement( updates[i] ); + if ( hqlParameters[i] != null ) { + int position = 1; // jdbc params are 1-based + for ( int x = 0; x < hqlParameters[i].length; x++ ) { + position += hqlParameters[i][x].bind( ps, parameters, session, position ); + } + } + ps.executeUpdate(); + } + finally { + if ( ps != null ) { + session.getBatcher().closeStatement( ps ); + } + } + } + catch( SQLException e ) { + throw JDBCExceptionHelper.convert( + getFactory().getSQLExceptionConverter(), + e, + "error performing bulk update", + updates[i] + ); + } + } + + return resultCount; + } + finally { + dropTemporaryTableIfNecessary( persister, session ); + } + } + + protected Queryable[] getAffectedQueryables() { + return new Queryable[] { persister }; + } +} diff --git a/src/org/hibernate/hql/ast/exec/StatementExecutor.java b/src/org/hibernate/hql/ast/exec/StatementExecutor.java new file mode 100644 index 0000000000..3907821bee --- /dev/null +++ b/src/org/hibernate/hql/ast/exec/StatementExecutor.java @@ -0,0 +1,28 @@ +// $Id$ +package org.hibernate.hql.ast.exec; + +import org.hibernate.HibernateException; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionImplementor; + +/** + * Encapsulates the strategy required to execute various types of update, delete, + * and insert statements issued through HQL. 
+ * + * @author Steve Ebersole + */ +public interface StatementExecutor { + + public String[] getSqlStatements(); + + /** + * Execute the sql managed by this executor using the given parameters. + * + * @param parameters Essentially bind information for this processing. + * @param session The session originating the request. + * @return The number of entities updated/deleted. + * @throws HibernateException + */ + public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException; + +} diff --git a/src/org/hibernate/hql/ast/package.html b/src/org/hibernate/hql/ast/package.html new file mode 100644 index 0000000000..da9b28c2d4 --- /dev/null +++ b/src/org/hibernate/hql/ast/package.html @@ -0,0 +1,6 @@ + +

+<html>
+<body>
+    An ANTLR-based parser for Hibernate Query Language.
+    <p>
+    Classes in this package extend the ANTLR-generated parser classes.
+</body></html>

    + \ No newline at end of file diff --git a/src/org/hibernate/hql/ast/tree/AbstractRestrictableStatement.java b/src/org/hibernate/hql/ast/tree/AbstractRestrictableStatement.java new file mode 100644 index 0000000000..229c7c780f --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/AbstractRestrictableStatement.java @@ -0,0 +1,64 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; + +import antlr.collections.AST; + +import org.apache.commons.logging.Log; + +/** + * Convenience implementation of RestrictableStatement to centralize common functionality. + * + * @author Steve Ebersole + */ +public abstract class AbstractRestrictableStatement extends AbstractStatement implements RestrictableStatement { + + private FromClause fromClause; + private AST whereClause; + + protected abstract int getWhereClauseParentTokenType(); + protected abstract Log getLog(); + + /** + * @see org.hibernate.hql.ast.tree.RestrictableStatement#getFromClause + */ + public final FromClause getFromClause() { + if ( fromClause == null ) { + fromClause = ( FromClause ) ASTUtil.findTypeInChildren( this, HqlSqlTokenTypes.FROM ); + } + return fromClause; + } + + /** + * @see RestrictableStatement#hasWhereClause + */ + public final boolean hasWhereClause() { + AST whereClause = locateWhereClause(); + return whereClause != null && whereClause.getNumberOfChildren() > 0; + } + + /** + * @see org.hibernate.hql.ast.tree.RestrictableStatement#getWhereClause + */ + public final AST getWhereClause() { + if ( whereClause == null ) { + whereClause = locateWhereClause(); + // If there is no WHERE node, make one. + if ( whereClause == null ) { + getLog().debug( "getWhereClause() : Creating a new WHERE clause..." ); + whereClause = ASTUtil.create( getWalker().getASTFactory(), HqlSqlTokenTypes.WHERE, "WHERE" ); + // inject the WHERE after the parent + AST parent = ASTUtil.findTypeInChildren( this, getWhereClauseParentTokenType() ); + whereClause.setNextSibling( parent.getNextSibling() ); + parent.setNextSibling( whereClause ); + } + } + return whereClause; + } + + protected AST locateWhereClause() { + return ASTUtil.findTypeInChildren( this, HqlSqlTokenTypes.WHERE ); + } +} diff --git a/src/org/hibernate/hql/ast/tree/AbstractSelectExpression.java b/src/org/hibernate/hql/ast/tree/AbstractSelectExpression.java new file mode 100644 index 0000000000..4f02520435 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/AbstractSelectExpression.java @@ -0,0 +1,43 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.type.Type; + +import antlr.SemanticException; + +/** + * Partial implementation of SelectExpression for all the nodes that aren't constructors. + * + * @author josh Nov 11, 2004 7:09:11 AM + */ +public abstract class AbstractSelectExpression extends HqlSqlWalkerNode implements SelectExpression { + + private String alias; + + public final void setAlias(String alias) { + this.alias = alias; + } + + public final String getAlias() { + return alias; + } + + public boolean isConstructor() { + return false; + } + + public boolean isReturnableEntity() throws SemanticException { + return false; + } + + public FromElement getFromElement() { + return null; + } + + public boolean isScalar() throws SemanticException { + // Default implementation: + // If this node has a data type, and that data type is not an association, then this is scalar. 
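+ // e.g. (hypothetical mapping): in "select cat.name from Cat cat" the
+ // expression's type is a string type, hence scalar; in "select cat.mate
+ // from Cat cat" the type is an association, hence not scalar.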
+ Type type = getDataType(); + return type != null && !type.isAssociationType(); // Moved here from SelectClause [jsd] + } +} diff --git a/src/org/hibernate/hql/ast/tree/AbstractStatement.java b/src/org/hibernate/hql/ast/tree/AbstractStatement.java new file mode 100644 index 0000000000..1d55fb062a --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/AbstractStatement.java @@ -0,0 +1,32 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import java.util.Iterator; + +/** + * Convenience implementation of Statement to centralize common functionality. + * + * @author Steve Ebersole + */ +public abstract class AbstractStatement extends HqlSqlWalkerNode implements DisplayableNode, Statement { + + /** + * Returns additional display text for the AST node. + * + * @return String - The additional display text. + */ + public String getDisplayText() { + StringBuffer buf = new StringBuffer(); + if ( getWalker().getQuerySpaces().size() > 0 ) { + buf.append( " querySpaces (" ); + for ( Iterator iterator = getWalker().getQuerySpaces().iterator(); iterator.hasNext(); ) { + buf.append( iterator.next() ); + if ( iterator.hasNext() ) { + buf.append( "," ); + } + } + buf.append( ")" ); + } + return buf.toString(); + } +} diff --git a/src/org/hibernate/hql/ast/tree/AggregateNode.java b/src/org/hibernate/hql/ast/tree/AggregateNode.java new file mode 100644 index 0000000000..7e316e46c7 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/AggregateNode.java @@ -0,0 +1,27 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.type.Type; + +import antlr.SemanticException; + +/** + * Represents an aggregate function i.e. min, max, sum, avg. + * + * @author josh Sep 21, 2004 9:22:02 PM + */ +public class AggregateNode extends AbstractSelectExpression implements SelectExpression { + + public AggregateNode() { + } + + public Type getDataType() { + // Get the function return value type, based on the type of the first argument. + return getSessionFactoryHelper().findFunctionReturnType( getText(), getFirstChild() ); + } + + public void setScalarColumnText(int i) throws SemanticException { + ColumnHelper.generateSingleScalarColumn( this, i ); + } +} diff --git a/src/org/hibernate/hql/ast/tree/AssignmentSpecification.java b/src/org/hibernate/hql/ast/tree/AssignmentSpecification.java new file mode 100644 index 0000000000..12c6c103b1 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/AssignmentSpecification.java @@ -0,0 +1,143 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.HashSet; +import java.util.Collections; + +import org.hibernate.QueryException; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.ast.SqlGenerator; +import org.hibernate.hql.ast.util.ASTUtil; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.persister.entity.UnionSubclassEntityPersister; + +import antlr.collections.AST; + +/** + * Encapsulates the information relating to an individual assignment within the + * set clause of an HQL update statement. This information is used during execution + * of the update statements when the updates occur against "multi-table" stuff. 
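+ *
+ * For example (illustrative HQL against a hypothetical mapping), in
+ * <pre>update Animal set mother = :m, description = :d where id = :id</pre>
+ * each of the two set-clause assignments becomes one AssignmentSpecification,
+ * which reports via affectsTable() which table(s) of the hierarchy it touches.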
+ * + * @author Steve Ebersole + */ +public class AssignmentSpecification { + + private final Set tableNames; + private final ParameterSpecification[] hqlParameters; + private final AST eq; + private final SessionFactoryImplementor factory; + + private String sqlAssignmentString; + + public AssignmentSpecification(AST eq, Queryable persister) { + if ( eq.getType() != HqlSqlTokenTypes.EQ ) { + throw new QueryException( "assignment in set-clause not associated with equals" ); + } + + this.eq = eq; + this.factory = persister.getFactory(); + + // Needed to bump this up to DotNode, because that is the only thing which currently + // knows about the property-ref path in the correct format; it is either this, or + // recurse over the DotNodes constructing the property path just like DotNode does + // internally + DotNode lhs = ( DotNode ) eq.getFirstChild(); + SqlNode rhs = ( SqlNode ) lhs.getNextSibling(); + + validateLhs( lhs ); + + final String propertyPath = lhs.getPropertyPath(); + Set temp = new HashSet(); + // yuck! + if ( persister instanceof UnionSubclassEntityPersister ) { + UnionSubclassEntityPersister usep = ( UnionSubclassEntityPersister ) persister; + String[] tables = persister.getConstraintOrderedTableNameClosure(); + int size = tables.length; + for ( int i = 0; i < size; i ++ ) { + temp.add( tables[i] ); + } + } + else { + temp.add( + persister.getSubclassTableName( persister.getSubclassPropertyTableNumber( propertyPath ) ) + ); + } + this.tableNames = Collections.unmodifiableSet( temp ); + + if (rhs==null) { + hqlParameters = new ParameterSpecification[0]; + } + else if ( isParam( rhs ) ) { + hqlParameters = new ParameterSpecification[] { ( ( ParameterNode ) rhs ).getHqlParameterSpecification() }; + } + else { + List parameterList = ASTUtil.collectChildren( + rhs, + new ASTUtil.IncludePredicate() { + public boolean include(AST node) { + return isParam( node ); + } + } + ); + hqlParameters = new ParameterSpecification[ parameterList.size() ]; + Iterator itr = parameterList.iterator(); + int i = 0; + while( itr.hasNext() ) { + hqlParameters[i++] = ( ( ParameterNode ) itr.next() ).getHqlParameterSpecification(); + } + } + } + + public boolean affectsTable(String tableName) { + return this.tableNames.contains( tableName ); + } + + public ParameterSpecification[] getParameters() { + return hqlParameters; + } + + public String getSqlAssignmentFragment() { + if ( sqlAssignmentString == null ) { + try { + SqlGenerator sqlGenerator = new SqlGenerator( factory ); + sqlGenerator.comparisonExpr( eq, false ); // false indicates to not generate parens around the assignment + sqlAssignmentString = sqlGenerator.getSQL(); + } + catch( Throwable t ) { + throw new QueryException( "cannot interpret set-clause assignment" ); + } + } + return sqlAssignmentString; + } + + private static boolean isParam(AST node) { + return node.getType() == HqlSqlTokenTypes.PARAM || node.getType() == HqlSqlTokenTypes.NAMED_PARAM; + } + + private void validateLhs(FromReferenceNode lhs) { + // make sure the lhs is "assignable"... + if ( !lhs.isResolved() ) { + throw new UnsupportedOperationException( "cannot validate assignablity of unresolved node" ); + } + + if ( lhs.getDataType().isCollectionType() ) { + throw new QueryException( "collections not assignable in update statements" ); + } + else if ( lhs.getDataType().isComponentType() ) { + throw new QueryException( "Components currently not assignable in update statements" ); + } + else if ( lhs.getDataType().isEntityType() ) { + // currently allowed... 
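+ // e.g. (hypothetical mapping) "set a.mother = :m" passes through here,
+ // whereas the collection and component cases were already rejected above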
+ } + + // TODO : why aren't these the same? + if ( lhs.getImpliedJoin() != null || lhs.getFromElement().isImplied() ) { + throw new QueryException( "Implied join paths are not assignable in update statements" ); + } + } +} diff --git a/src/org/hibernate/hql/ast/tree/BetweenOperatorNode.java b/src/org/hibernate/hql/ast/tree/BetweenOperatorNode.java new file mode 100644 index 0000000000..838d0932a3 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/BetweenOperatorNode.java @@ -0,0 +1,61 @@ +package org.hibernate.hql.ast.tree; + +import org.hibernate.type.Type; +import org.hibernate.Hibernate; +import antlr.SemanticException; + +/** + * Contract for nodes representing logcial BETWEEN (ternary) operators. + * + * @author Steve Ebersole + */ +public class BetweenOperatorNode extends SqlNode implements OperatorNode { + + public void initialize() throws SemanticException { + Node fixture = getFixtureOperand(); + if ( fixture == null ) { + throw new SemanticException( "fixture operand of a between operator was null" ); + } + Node low = getLowOperand(); + if ( low == null ) { + throw new SemanticException( "low operand of a between operator was null" ); + } + Node high = getHighOperand(); + if ( high == null ) { + throw new SemanticException( "high operand of a between operator was null" ); + } + check( fixture, low, high ); + check( low, high, fixture ); + check( high, fixture, low ); + } + + public Type getDataType() { + // logic operators by definition resolve to boolean. + return Hibernate.BOOLEAN; + } + + public Node getFixtureOperand() { + return ( Node ) getFirstChild(); + } + + public Node getLowOperand() { + return ( Node ) getFirstChild().getNextSibling(); + } + + public Node getHighOperand() { + return ( Node ) getFirstChild().getNextSibling().getNextSibling(); + } + + private void check(Node check, Node first, Node second) { + if ( ExpectedTypeAwareNode.class.isAssignableFrom( check.getClass() ) ) { + Type expectedType = null; + if ( SqlNode.class.isAssignableFrom( first.getClass() ) ) { + expectedType = ( ( SqlNode ) first ).getDataType(); + } + if ( expectedType == null && SqlNode.class.isAssignableFrom( second.getClass() ) ) { + expectedType = ( ( SqlNode ) second ).getDataType(); + } + ( ( ExpectedTypeAwareNode ) check ).setExpectedType( expectedType ); + } + } +} diff --git a/src/org/hibernate/hql/ast/tree/BinaryArithmeticOperatorNode.java b/src/org/hibernate/hql/ast/tree/BinaryArithmeticOperatorNode.java new file mode 100644 index 0000000000..885c8908a0 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/BinaryArithmeticOperatorNode.java @@ -0,0 +1,191 @@ +//$Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.Hibernate; +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.type.Type; + +import antlr.SemanticException; + +/** + * Nodes which represent binary arithmetic operators. + * + * @author Gavin King + */ +public class BinaryArithmeticOperatorNode extends AbstractSelectExpression implements BinaryOperatorNode, DisplayableNode { + + public void initialize() throws SemanticException { + Node lhs = getLeftHandOperand(); + Node rhs = getRightHandOperand(); + if ( lhs == null ) { + throw new SemanticException( "left-hand operand of a binary operator was null" ); + } + if ( rhs == null ) { + throw new SemanticException( "right-hand operand of a binary operator was null" ); + } + + Type lhType = ( lhs instanceof SqlNode ) ? 
( ( SqlNode ) lhs ).getDataType() : null; + Type rhType = ( rhs instanceof SqlNode ) ? ( ( SqlNode ) rhs ).getDataType() : null; + + if ( ExpectedTypeAwareNode.class.isAssignableFrom( lhs.getClass() ) && rhType != null ) { + Type expectedType = null; + // we have something like : "? [op] rhs" + if ( isDateTimeType( rhType ) ) { + // more specifically : "? [op] datetime" + // 1) if the operator is MINUS, the param needs to be of + // some datetime type + // 2) if the operator is PLUS, the param needs to be of + // some numeric type + expectedType = getType() == HqlSqlTokenTypes.PLUS ? Hibernate.DOUBLE : rhType; + } + else { + expectedType = rhType; + } + ( ( ExpectedTypeAwareNode ) lhs ).setExpectedType( expectedType ); + } + else if ( ParameterNode.class.isAssignableFrom( rhs.getClass() ) && lhType != null ) { + Type expectedType = null; + // we have something like : "lhs [op] ?" + if ( isDateTimeType( lhType ) ) { + // more specifically : "datetime [op] ?" + // 1) if the operator is MINUS, we really cannot determine + // the expected type as either another datetime or + // numeric would be valid + // 2) if the operator is PLUS, the param needs to be of + // some numeric type + if ( getType() == HqlSqlTokenTypes.PLUS ) { + expectedType = Hibernate.DOUBLE; + } + } + else { + expectedType = lhType; + } + ( ( ExpectedTypeAwareNode ) rhs ).setExpectedType( expectedType ); + } + } + + /** + * Figure out the type of the binary expression by looking at + * the types of the operands. Sometimes we don't know both types, + * if, for example, one is a parameter. + */ + public Type getDataType() { + if ( super.getDataType() == null ) { + super.setDataType( resolveDataType() ); + } + return super.getDataType(); + } + + private Type resolveDataType() { + // TODO : we may also want to check that the types here map to exactly one column/JDBC-type + // can't think of a situation where arithmetic expression between multi-column mappings + // makes any sense. + Node lhs = getLeftHandOperand(); + Node rhs = getRightHandOperand(); + Type lhType = ( lhs instanceof SqlNode ) ? ( ( SqlNode ) lhs ).getDataType() : null; + Type rhType = ( rhs instanceof SqlNode ) ? ( ( SqlNode ) rhs ).getDataType() : null; + if ( isDateTimeType( lhType ) || isDateTimeType( rhType ) ) { + return resolveDateTimeArithmeticResultType( lhType, rhType ); + } + else { + if ( lhType == null ) { + if ( rhType == null ) { + // we do not know either type + return Hibernate.DOUBLE; //BLIND GUESS! 
+ } + else { + // we know only the rhs-hand type, so use that + return rhType; + } + } + else { + if ( rhType == null ) { + // we know only the lhs-hand type, so use that + return lhType; + } + else { + if ( lhType==Hibernate.DOUBLE || rhType==Hibernate.DOUBLE ) return Hibernate.DOUBLE; + if ( lhType==Hibernate.FLOAT || rhType==Hibernate.FLOAT ) return Hibernate.FLOAT; + if ( lhType==Hibernate.BIG_DECIMAL || rhType==Hibernate.BIG_DECIMAL ) return Hibernate.BIG_DECIMAL; + if ( lhType==Hibernate.BIG_INTEGER || rhType==Hibernate.BIG_INTEGER ) return Hibernate.BIG_INTEGER; + if ( lhType==Hibernate.LONG || rhType==Hibernate.LONG ) return Hibernate.LONG; + if ( lhType==Hibernate.INTEGER || rhType==Hibernate.INTEGER ) return Hibernate.INTEGER; + return lhType; + } + } + } + } + + private boolean isDateTimeType(Type type) { + if ( type == null ) { + return false; + } + return java.util.Date.class.isAssignableFrom( type.getReturnedClass() ) || + java.util.Calendar.class.isAssignableFrom( type.getReturnedClass() ); + } + + private Type resolveDateTimeArithmeticResultType(Type lhType, Type rhType) { + // here, we work under the following assumptions: + // ------------ valid cases -------------------------------------- + // 1) datetime + {something other than datetime} : always results + // in a datetime ( db will catch invalid conversions ) + // 2) datetime - datetime : always results in a DOUBLE + // 3) datetime - {something other than datetime} : always results + // in a datetime ( db will catch invalid conversions ) + // ------------ invalid cases ------------------------------------ + // 4) datetime + datetime + // 5) {something other than datetime} - datetime + // 6) datetime * {any type} + // 7) datetime / {any type} + // 8) {any type} / datetime + // doing so allows us to properly handle parameters as either the left + // or right side here in the majority of cases + boolean lhsIsDateTime = isDateTimeType( lhType ); + boolean rhsIsDateTime = isDateTimeType( rhType ); + + // handle the (assumed) valid cases: + // #1 - the only valid datetime addition synatx is one or the other is a datetime (but not both) + if ( getType() == HqlSqlTokenTypes.PLUS ) { + // one or the other needs to be a datetime for us to get into this method in the first place... + return lhsIsDateTime ? lhType : rhType; + } + else if ( getType() == HqlSqlTokenTypes.MINUS ) { + // #3 - note that this is also true of "datetime - :param"... + if ( lhsIsDateTime && !rhsIsDateTime ) { + return lhType; + } + // #2 + if ( lhsIsDateTime && rhsIsDateTime ) { + return Hibernate.DOUBLE; + } + } + return null; + } + + public void setScalarColumnText(int i) throws SemanticException { + ColumnHelper.generateSingleScalarColumn( this, i ); + } + + /** + * Retrieves the left-hand operand of the operator. + * + * @return The left-hand operand + */ + public Node getLeftHandOperand() { + return ( Node ) getFirstChild(); + } + + /** + * Retrieves the right-hand operand of the operator. 
+ * + * @return The right-hand operand + */ + public Node getRightHandOperand() { + return ( Node ) getFirstChild().getNextSibling(); + } + + public String getDisplayText() { + return "{dataType=" + getDataType() + "}"; + } +} diff --git a/src/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java b/src/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java new file mode 100644 index 0000000000..5137733583 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java @@ -0,0 +1,198 @@ +package org.hibernate.hql.ast.tree; + +import org.hibernate.type.Type; +import org.hibernate.Hibernate; +import org.hibernate.TypeMismatchException; +import org.hibernate.HibernateException; +import org.hibernate.util.StringHelper; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.dialect.HSQLDialect; +import org.hibernate.engine.SessionFactoryImplementor; +import antlr.SemanticException; +import antlr.collections.AST; + +/** + * Contract for nodes representing binary operators. + * + * @author Steve Ebersole + */ +public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryOperatorNode { + /** + * Performs the operator node initialization by seeking out any parameter + * nodes and setting their expected type, if possible. + */ + public void initialize() throws SemanticException { + Node lhs = getLeftHandOperand(); + if ( lhs == null ) { + throw new SemanticException( "left-hand operand of a binary operator was null" ); + } + Node rhs = getRightHandOperand(); + if ( rhs == null ) { + throw new SemanticException( "right-hand operand of a binary operator was null" ); + } + + Type lhsType = extractDataType( lhs ); + Type rhsType = extractDataType( rhs ); + + if ( lhsType == null ) { + lhsType = rhsType; + } + if ( rhsType == null ) { + rhsType = lhsType; + } + + if ( ExpectedTypeAwareNode.class.isAssignableFrom( lhs.getClass() ) ) { + ( ( ExpectedTypeAwareNode ) lhs ).setExpectedType( rhsType ); + } + if ( ExpectedTypeAwareNode.class.isAssignableFrom( rhs.getClass() ) ) { + ( ( ExpectedTypeAwareNode ) rhs ).setExpectedType( lhsType ); + } + + mutateRowValueConstructorSyntaxesIfNecessary( lhsType, rhsType ); + } + + protected final void mutateRowValueConstructorSyntaxesIfNecessary(Type lhsType, Type rhsType) { + // TODO : this really needs to be delayed unitl after we definitively know all node types + // where this is currently a problem is parameters for which where we cannot unequivocally + // resolve an expected type + SessionFactoryImplementor sessionFactory = getSessionFactoryHelper().getFactory(); + if ( lhsType != null && rhsType != null ) { + int lhsColumnSpan = lhsType.getColumnSpan( sessionFactory ); + if ( lhsColumnSpan != rhsType.getColumnSpan( sessionFactory ) ) { + throw new TypeMismatchException( + "left and right hand sides of a binary logic operator were incompatibile [" + + lhsType.getName() + " : "+ rhsType.getName() + "]" + ); + } + if ( lhsColumnSpan > 1 ) { + // for dialects which are known to not support ANSI-SQL row-value-constructor syntax, + // we should mutate the tree. + if ( !sessionFactory.getDialect().supportsRowValueConstructorSyntax() ) { + mutateRowValueConstructorSyntax( lhsColumnSpan ); + } + } + } + } + + /** + * Mutate the subtree relating to a row-value-constructor to instead use + * a series of ANDed predicates. This allows multi-column type comparisons + * and explicit row-value-constructor syntax even on databases which do + * not support row-value-constructor. + *

    + * For example, here we'd mutate "... where (col1, col2) = ('val1', 'val2) ..." to + * "... where col1 = 'val1' and col2 = 'val2' ..." + * + * @param valueElements The number of elements in the row value constructor list. + */ + private void mutateRowValueConstructorSyntax(int valueElements) { + // mutation depends on the types of nodes invloved... + int comparisonType = getType(); + String comparisonText = getText(); + setType( HqlSqlTokenTypes.AND ); + setText( "AND" ); + String[] lhsElementTexts = extractMutationTexts( getLeftHandOperand(), valueElements ); + String[] rhsElementTexts = extractMutationTexts( getRightHandOperand(), valueElements ); + + AST container = this; + for ( int i = valueElements - 1; i > 0; i-- ) { + + if ( i == 1 ) { + AST op1 = getASTFactory().create( comparisonType, comparisonText ); + AST lhs1 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, lhsElementTexts[0] ); + AST rhs1 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, rhsElementTexts[0] ); + op1.setFirstChild( lhs1 ); + lhs1.setNextSibling( rhs1 ); + container.setFirstChild( op1 ); + AST op2 = getASTFactory().create( comparisonType, comparisonText ); + AST lhs2 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, lhsElementTexts[1] ); + AST rhs2 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, rhsElementTexts[1] ); + op2.setFirstChild( lhs2 ); + lhs2.setNextSibling( rhs2 ); + op1.setNextSibling( op2 ); + } + else { + AST op = getASTFactory().create( comparisonType, comparisonText ); + AST lhs = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, lhsElementTexts[i] ); + AST rhs = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, rhsElementTexts[i] ); + op.setFirstChild( lhs ); + lhs.setNextSibling( rhs ); + AST newContainer = getASTFactory().create( HqlSqlTokenTypes.AND, "AND" ); + container.setFirstChild( newContainer ); + newContainer.setNextSibling( op ); + container = newContainer; + } + } + } + + private static String[] extractMutationTexts(Node operand, int count) { + if ( operand instanceof ParameterNode ) { + String[] rtn = new String[count]; + for ( int i = 0; i < count; i++ ) { + rtn[i] = "?"; + } + return rtn; + } + else if ( operand.getType() == HqlSqlTokenTypes.VECTOR_EXPR ) { + String[] rtn = new String[ operand.getNumberOfChildren() ]; + int x = 0; + AST node = operand.getFirstChild(); + while ( node != null ) { + rtn[ x++ ] = node.getText(); + node = node.getNextSibling(); + } + return rtn; + } + else if ( operand instanceof SqlNode ) { + String nodeText = operand.getText(); + if ( nodeText.startsWith( "(" ) ) { + nodeText = nodeText.substring( 1 ); + } + if ( nodeText.endsWith( ")" ) ) { + nodeText = nodeText.substring( 0, nodeText.length() - 1 ); + } + String[] splits = StringHelper.split( ", ", nodeText ); + if ( count != splits.length ) { + throw new HibernateException( "SqlNode's text did not reference expected number of columns" ); + } + return splits; + } + else { + throw new HibernateException( "dont know how to extract row value elements from node : " + operand ); + } + } + + protected Type extractDataType(Node operand) { + Type type = null; + if ( operand instanceof SqlNode ) { + type = ( ( SqlNode ) operand ).getDataType(); + } + if ( type == null && operand instanceof ExpectedTypeAwareNode ) { + type = ( ( ExpectedTypeAwareNode ) operand ).getExpectedType(); + } + return type; + } + + public Type getDataType() { + // logic operators by definition resolve to booleans + return Hibernate.BOOLEAN; + } + + /** + * Retrieves the left-hand operand of the operator. 
+ * + * @return The left-hand operand + */ + public Node getLeftHandOperand() { + return ( Node ) getFirstChild(); + } + + /** + * Retrieves the right-hand operand of the operator. + * + * @return The right-hand operand + */ + public Node getRightHandOperand() { + return ( Node ) getFirstChild().getNextSibling(); + } +} diff --git a/src/org/hibernate/hql/ast/tree/BinaryOperatorNode.java b/src/org/hibernate/hql/ast/tree/BinaryOperatorNode.java new file mode 100644 index 0000000000..4384d3397f --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/BinaryOperatorNode.java @@ -0,0 +1,22 @@ +package org.hibernate.hql.ast.tree; + +/** + * Contract for nodes representing binary operators. + * + * @author Steve Ebersole + */ +public interface BinaryOperatorNode extends OperatorNode { + /** + * Retrieves the left-hand operand of the operator. + * + * @return The left-hand operand + */ + public Node getLeftHandOperand(); + + /** + * Retrieves the right-hand operand of the operator. + * + * @return The right-hand operand + */ + public Node getRightHandOperand(); +} diff --git a/src/org/hibernate/hql/ast/tree/BooleanLiteralNode.java b/src/org/hibernate/hql/ast/tree/BooleanLiteralNode.java new file mode 100644 index 0000000000..2130f54c7d --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/BooleanLiteralNode.java @@ -0,0 +1,50 @@ +package org.hibernate.hql.ast.tree; + +import org.hibernate.type.Type; +import org.hibernate.type.BooleanType; +import org.hibernate.Hibernate; +import org.hibernate.QueryException; +import org.hibernate.engine.SessionFactoryImplementor; + +/** + * Represents a boolean literal within a query. + * + * @author Steve Ebersole + */ +public class BooleanLiteralNode extends LiteralNode implements ExpectedTypeAwareNode { + private Type expectedType; + + public Type getDataType() { + return expectedType == null ? Hibernate.BOOLEAN : expectedType; + } + + public BooleanType getTypeInternal() { + return ( BooleanType ) getDataType(); + } + + public Boolean getValue() { + return getType() == TRUE ? Boolean.TRUE : Boolean.FALSE; + } + + /** + * Expected-types really only pertinent here for boolean literals... + * + * @param expectedType + */ + public void setExpectedType(Type expectedType) { + this.expectedType = expectedType; + } + + public Type getExpectedType() { + return expectedType; + } + + public String getRenderText(SessionFactoryImplementor sessionFactory) { + try { + return getTypeInternal().objectToSQLString( getValue(), sessionFactory.getDialect() ); + } + catch( Throwable t ) { + throw new QueryException( "Unable to render boolean literal value", t ); + } + } +} diff --git a/src/org/hibernate/hql/ast/tree/Case2Node.java b/src/org/hibernate/hql/ast/tree/Case2Node.java new file mode 100644 index 0000000000..4e3e27ea08 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/Case2Node.java @@ -0,0 +1,28 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.type.Type; + +import antlr.SemanticException; + +/** + * Represents a case ... when .. then ... else ... end expression in a select. 
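+ *
+ * For example (illustrative HQL; which of the two case flavors maps to this
+ * node rather than its sibling CaseNode is determined by the grammar):
+ * <pre>select case cat.age when 0 then 'newborn' else 'grown' end from Cat cat</pre>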
+ * + * @author Gavin King + */ +public class Case2Node extends AbstractSelectExpression implements SelectExpression { + + public Type getDataType() { + return getFirstThenNode().getDataType(); + } + + private SelectExpression getFirstThenNode() { + return (SelectExpression) getFirstChild().getNextSibling().getFirstChild().getNextSibling(); + } + + public void setScalarColumnText(int i) throws SemanticException { + ColumnHelper.generateSingleScalarColumn( this, i ); + } + +} diff --git a/src/org/hibernate/hql/ast/tree/CaseNode.java b/src/org/hibernate/hql/ast/tree/CaseNode.java new file mode 100644 index 0000000000..5acc134266 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/CaseNode.java @@ -0,0 +1,28 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.type.Type; + +import antlr.SemanticException; + +/** + * Represents a case ... when .. then ... else ... end expression in a select. + * + * @author Gavin King + */ +public class CaseNode extends AbstractSelectExpression implements SelectExpression { + + public Type getDataType() { + return getFirstThenNode().getDataType(); + } + + private SelectExpression getFirstThenNode() { + return (SelectExpression) getFirstChild().getFirstChild().getNextSibling(); + } + + public void setScalarColumnText(int i) throws SemanticException { + ColumnHelper.generateSingleScalarColumn( this, i ); + } + +} diff --git a/src/org/hibernate/hql/ast/tree/CollectionFunction.java b/src/org/hibernate/hql/ast/tree/CollectionFunction.java new file mode 100644 index 0000000000..52923a5c38 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/CollectionFunction.java @@ -0,0 +1,33 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import antlr.SemanticException; +import antlr.collections.AST; + +/** + * Represents 'elements()' or 'indices()'. + * + * @author josh Dec 6, 2004 8:36:42 AM + */ +public class CollectionFunction extends MethodNode implements DisplayableNode { + public void resolve(boolean inSelect) throws SemanticException { + initializeMethodNode( this, inSelect ); + if ( !isCollectionPropertyMethod() ) { + throw new SemanticException( this.getText() + " is not a collection property name!" ); + } + AST expr = getFirstChild(); + if ( expr == null ) { + throw new SemanticException( this.getText() + " requires a path!" ); + } + resolveCollectionProperty( expr ); + } + + protected void prepareSelectColumns(String[] selectColumns) { + // we need to strip off the embedded parens so that sql-gen does not double these up + String subselect = selectColumns[0].trim(); + if ( subselect.startsWith( "(") && subselect.endsWith( ")" ) ) { + subselect = subselect.substring( 1, subselect.length() -1 ); + } + selectColumns[0] = subselect; + } +} diff --git a/src/org/hibernate/hql/ast/tree/ConstructorNode.java b/src/org/hibernate/hql/ast/tree/ConstructorNode.java new file mode 100644 index 0000000000..4ca53f5405 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/ConstructorNode.java @@ -0,0 +1,158 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import java.lang.reflect.Constructor; +import java.util.Arrays; +import java.util.List; + +import org.hibernate.PropertyNotFoundException; +import org.hibernate.hql.ast.DetailedSemanticException; +import org.hibernate.type.Type; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +import antlr.SemanticException; +import antlr.collections.AST; + +/** + * Represents a constructor (new) in a SELECT. 
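+ * <p>
+ * For example, in the style of the Hibernate reference documentation:
+ * "select new Family(mother, mate, offspring) from DomesticCat as mother
+ * join mother.mate as mate left join mother.kittens as offspring"
+ * requires Family to declare a constructor matching the three selected types.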
+ * + * @author josh Sep 24, 2004 6:46:08 PM + */ +public class ConstructorNode extends SelectExpressionList implements SelectExpression { + + private Constructor constructor; + private Type[] constructorArgumentTypes; + private boolean isMap; + private boolean isList; + + public boolean isMap() { + return isMap; + } + + public boolean isList() { + return isList; + } + + public String[] getAliases() { + SelectExpression[] selectExpressions = collectSelectExpressions(); + String[] aliases = new String[selectExpressions.length] ; + for ( int i=0; i 1 && getWalker().isComparativeExpressionClause() ) { + text = "(" + text + ")"; + } + setText( text ); + } + + private Type prepareLhs() throws SemanticException { + FromReferenceNode lhs = getLhs(); + lhs.prepareForDot( propertyName ); + return getDataType(); + } + + private void dereferenceCollection(CollectionType collectionType, boolean implicitJoin, boolean indexed, String classAlias, AST parent) + throws SemanticException { + + dereferenceType = DEREF_COLLECTION; + String role = collectionType.getRole(); + + //foo.bars.size (also handles deprecated stuff like foo.bars.maxelement for backwardness) + boolean isSizeProperty = getNextSibling()!=null && + CollectionProperties.isAnyCollectionProperty( getNextSibling().getText() ); + + if ( isSizeProperty ) indexed = true; //yuck! + + QueryableCollection queryableCollection = getSessionFactoryHelper().requireQueryableCollection( role ); + String propName = getPath(); + FromClause currentFromClause = getWalker().getCurrentFromClause(); + + if ( getWalker().getStatementType() != SqlTokenTypes.SELECT && indexed && classAlias == null ) { + // should indicate that we are processing an INSERT/UPDATE/DELETE + // query with a subquery implied via a collection property + // function. Here, we need to use the table name itself as the + // qualification alias. + // TODO : verify this works for all databases... + // TODO : is this also the case in non-"indexed" scenarios? + String alias = getLhs().getFromElement().getQueryable().getTableName(); + columns = getFromElement().toColumns( alias, propertyPath, false, true ); + } + + //We do not look for an existing join on the same path, because + //it makes sense to join twice on the same collection role + FromElementFactory factory = new FromElementFactory( + currentFromClause, + getLhs().getFromElement(), + propName, + classAlias, + getColumns(), + implicitJoin + ); + FromElement elem = factory.createCollection( queryableCollection, role, joinType, fetch, indexed ); + + if ( log.isDebugEnabled() ) { + log.debug( "dereferenceCollection() : Created new FROM element for " + propName + " : " + elem ); + } + + setImpliedJoin( elem ); + setFromElement( elem ); // This 'dot' expression now refers to the resulting from element. + + if ( isSizeProperty ) { + elem.setText(""); + elem.setUseWhereFragment(false); + } + + if ( !implicitJoin ) { + EntityPersister entityPersister = elem.getEntityPersister(); + if ( entityPersister != null ) { + getWalker().addQuerySpaces( entityPersister.getQuerySpaces() ); + } + } + getWalker().addQuerySpaces( queryableCollection.getCollectionSpaces() ); // Always add the collection's query spaces. + } + + private void dereferenceEntity(EntityType entityType, boolean implicitJoin, String classAlias, boolean generateJoin, AST parent) throws SemanticException { + checkForCorrelatedSubquery( "dereferenceEntity" ); + // three general cases we check here as to whether to render a physical SQL join: + // 1) is our parent a DotNode as well? 
If so, our property reference is
+ // being further de-referenced...
+ // 2) is this a DML statement
+ // 3) we were asked to generate any needed joins (generateJoins==true) *OR*
+ // we are currently processing a select or from clause
+ // (an additional check is the REGRESSION_STYLE_JOIN_SUPPRESSION check solely intended for the test suite)
+ //
+ // The REGRESSION_STYLE_JOIN_SUPPRESSION is an additional check
+ // intended solely for use within the test suite. This forces the
+ // implicit join resolution to behave more like the classic parser.
+ // The underlying issue is that the classic translator is simply wrong
+ // about its decisions on whether or not to render an implicit join
+ // into a physical SQL join in a lot of cases. The piece it generally
+ // tends to miss is that INNER joins affect the results by further
+ // restricting the data set! A particular manifestation of this is
+ // the fact that the classic translator will skip the physical join
+ // for ToOne implicit joins *if the query is shallow*; the result
+ // being that Query.list() and Query.iterate() could return a
+ // different number of results!
+ DotNode parentAsDotNode = null;
+ String property = propertyName;
+ final boolean joinIsNeeded;
+
+ if ( isDotNode( parent ) ) {
+ // our parent is another dot node, meaning we are being further dereferenced.
+ // thus we need to generate a join unless the parent refers to the associated
+ // entity's PK (because 'our' table would know the FK).
+ parentAsDotNode = ( DotNode ) parent;
+ property = parentAsDotNode.propertyName;
+ joinIsNeeded = generateJoin && !isReferenceToPrimaryKey( parentAsDotNode.propertyName, entityType );
+ }
+ else if ( ! getWalker().isSelectStatement() ) {
+ joinIsNeeded = false;
+ }
+ else if ( REGRESSION_STYLE_JOIN_SUPPRESSION ) {
+ // this is the regression style determination which matches the logic of the classic translator
+ joinIsNeeded = generateJoin && ( !getWalker().isInSelect() || !getWalker().isShallowQuery() );
+ }
+ else {
+ joinIsNeeded = generateJoin || ( getWalker().isInSelect() || getWalker().isInFrom() );
+ }
+
+ if ( joinIsNeeded ) {
+ dereferenceEntityJoin( classAlias, entityType, implicitJoin, parent );
+ }
+ else {
+ dereferenceEntityIdentifier( property, parentAsDotNode );
+ }
+
+ }
+
+ private boolean isDotNode(AST n) {
+ return n != null && n.getType() == SqlTokenTypes.DOT;
+ }
+
+ private void dereferenceEntityJoin(String classAlias, EntityType propertyType, boolean impliedJoin, AST parent)
+ throws SemanticException {
+ dereferenceType = DEREF_ENTITY;
+ if ( log.isDebugEnabled() ) {
+ log.debug( "dereferenceEntityJoin() : generating join for " + propertyName + " in "
+ + getFromElement().getClassName() + " "
+ + ( ( classAlias == null ) ? "{no alias}" : "(" + classAlias + ")" )
+ + " parent = " + ASTUtil.getDebugString( parent )
+ );
+ }
+ // Create a new FROM node for the referenced class.
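+ // For example, given an implied join such as "o.customer.name" (assuming an
+ // Order entity with a many-to-one "customer" property; names are illustrative),
+ // this step creates a FromElement for Customer under a generated table alias
+ // so the SQL can join to the customer's table.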
+ String associatedEntityName = propertyType.getAssociatedEntityName();
+ String tableAlias = getAliasGenerator().createName( associatedEntityName );
+
+ String[] joinColumns = getColumns();
+ String joinPath = getPath();
+
+ if ( impliedJoin && getWalker().isInFrom() ) {
+ joinType = getWalker().getImpliedJoinType();
+ }
+
+ FromClause currentFromClause = getWalker().getCurrentFromClause();
+ FromElement elem = currentFromClause.findJoinByPath( joinPath );
+
+///////////////////////////////////////////////////////////////////////////////
+//
+// This is the piece which recognizes the condition where an implicit join path
+// resolved earlier in a correlated subquery is now being referenced in the
+// outer query. For 3.0final, we just let this generate a second join (which
+// is exactly how the old parser handles this). Eventually we need to add this
+// logic back in and complete the logic in FromClause.promoteJoin; however,
+// FromClause.promoteJoin has its own difficulties (see the comments in
+// FromClause.promoteJoin).
+//
+// if ( elem == null ) {
+// // see if this joinPath has been used in a "child" FromClause, and if so
+// // promote that element to the outer query
+// FromClause currentNodeOwner = getFromElement().getFromClause();
+// FromClause currentJoinOwner = currentNodeOwner.locateChildFromClauseWithJoinByPath( joinPath );
+// if ( currentJoinOwner != null && currentNodeOwner != currentJoinOwner ) {
+// elem = currentJoinOwner.findJoinByPathLocal( joinPath );
+// if ( elem != null ) {
+// currentFromClause.promoteJoin( elem );
+// // EARLY EXIT!!!
+// return;
+// }
+// }
+// }
+//
+///////////////////////////////////////////////////////////////////////////////
+
+ if ( elem == null ) {
+ // If this is an implied join in a from element, then use the implied join type which is part of the
+ // tree parser's state (set by the grammar actions).
+ JoinSequence joinSequence = getSessionFactoryHelper()
+ .createJoinSequence( impliedJoin, propertyType, tableAlias, joinType, joinColumns );
+
+ FromElementFactory factory = new FromElementFactory(
+ currentFromClause,
+ getLhs().getFromElement(),
+ joinPath,
+ classAlias,
+ joinColumns,
+ impliedJoin
+ );
+ elem = factory.createEntityJoin(
+ associatedEntityName,
+ tableAlias,
+ joinSequence,
+ fetch,
+ getWalker().isInFrom(),
+ propertyType
+ );
+ }
+ else {
+ currentFromClause.addDuplicateAlias(classAlias, elem);
+ }
+ setImpliedJoin( elem );
+ getWalker().addQuerySpaces( elem.getEntityPersister().getQuerySpaces() );
+ setFromElement( elem ); // This 'dot' expression now refers to the resulting from element.
+ }
+
+ private void setImpliedJoin(FromElement elem) {
+ this.impliedJoin = elem;
+ if ( getFirstChild().getType() == SqlTokenTypes.DOT ) {
+ DotNode dotLhs = ( DotNode ) getFirstChild();
+ if ( dotLhs.getImpliedJoin() != null ) {
+ this.impliedJoin = dotLhs.getImpliedJoin();
+ }
+ }
+ }
+
+ public FromElement getImpliedJoin() {
+ return impliedJoin;
+ }
+
+ /**
+ * Is the given property name a reference to the primary key of the associated
+ * entity construed by the given entity type?
+ *
+ * For example, consider a fragment like order.customer.id
+ * (where order is a from-element alias). Here, we'd have:
+ * propertyName = "id" AND
+ * owningType = ManyToOneType(Customer)
+ * and are being asked to determine whether "customer.id" is a reference
+ * to customer's PK...
+ *
+ * @param propertyName The name of the property to check.
+ * @param owningType The type representing the entity "owning" the property
+ * @return True if propertyName references the entity's (owningType->associatedEntity)
+ * primary key; false otherwise.
+ */
+ private boolean isReferenceToPrimaryKey(String propertyName, EntityType owningType) {
+ EntityPersister persister = getSessionFactoryHelper()
+ .getFactory()
+ .getEntityPersister( owningType.getAssociatedEntityName() );
+ if ( persister.getEntityMetamodel().hasNonIdentifierPropertyNamedId() ) {
+ // only the identifier property field name can be a reference to the associated entity's PK...
+ return propertyName.equals( persister.getIdentifierPropertyName() ) && owningType.isReferenceToPrimaryKey();
+ }
+ else {
+ // here, we have two possibilities:
+ // 1) the property-name matches the explicit identifier property name
+ // 2) the property-name matches the implicit 'id' property name
+ if ( EntityPersister.ENTITY_ID.equals( propertyName ) ) {
+ // the referenced node text is the special 'id'
+ return owningType.isReferenceToPrimaryKey();
+ }
+ else {
+ String keyPropertyName = getSessionFactoryHelper().getIdentifierOrUniqueKeyPropertyName( owningType );
+ return keyPropertyName != null && keyPropertyName.equals( propertyName ) && owningType.isReferenceToPrimaryKey();
+ }
+ }
+ }
+
+ private void checkForCorrelatedSubquery(String methodName) {
+ if ( isCorrelatedSubselect() ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( methodName + "() : correlated subquery" );
+ }
+ }
+ }
+
+ private boolean isCorrelatedSubselect() {
+ return getWalker().isSubQuery() &&
+ getFromElement().getFromClause() != getWalker().getCurrentFromClause();
+ }
+
+ private void checkLhsIsNotCollection() throws SemanticException {
+ if ( getLhs().getDataType() != null && getLhs().getDataType().isCollectionType() ) {
+ throw ILLEGAL_COLL_DEREF_EXCP_BUILDER.buildIllegalCollectionDereferenceException( propertyName, getLhs() );
+ }
+ }
+
+ private void dereferenceComponent(AST parent) {
+ dereferenceType = DEREF_COMPONENT;
+ setPropertyNameAndPath( parent );
+ }
+
+ private void dereferenceEntityIdentifier(String propertyName, DotNode dotParent) {
+ // special shortcut for id properties, skip the join!
+ // this must only occur at the _end_ of a path expression
+ if ( log.isDebugEnabled() ) {
+ log.debug( "dereferenceEntityIdentifier() : property "
+ + propertyName + " in " + getFromElement().getClassName()
+ + " does not require a join." );
+ }
+
+ initText();
+ setPropertyNameAndPath( dotParent ); // Set the unresolved path in this node and the parent.
+ // Set the text for the parent.
+ if ( dotParent != null ) {
+ dotParent.dereferenceType = DEREF_IDENTIFIER;
+ dotParent.setText( getText() );
+ dotParent.columns = getColumns();
+ }
+ }
+
+ private void setPropertyNameAndPath(AST parent) {
+ if ( isDotNode( parent ) ) {
+ DotNode dotNode = ( DotNode ) parent;
+ AST lhs = dotNode.getFirstChild();
+ AST rhs = lhs.getNextSibling();
+ propertyName = rhs.getText();
+ propertyPath = propertyPath + "." + propertyName; // Append the new property name onto the unresolved path.
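+ // For example, while resolving a path like "cust.address.city" (illustrative
+ // names), the unresolved path grows from "address" to "address.city" as each
+ // enclosing dot node is visited.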
+ dotNode.propertyPath = propertyPath; + if ( log.isDebugEnabled() ) { + log.debug( "Unresolved property path is now '" + dotNode.propertyPath + "'" ); + } + } + else { + if ( log.isDebugEnabled() ) { + log.debug( "terminal propertyPath = [" + propertyPath + "]" ); + } + } + } + + public Type getDataType() { + if ( super.getDataType() == null ) { + FromElement fromElement = getLhs().getFromElement(); + if ( fromElement == null ) { + return null; + } + // If the lhs is a collection, use CollectionPropertyMapping + Type propertyType = fromElement.getPropertyType( propertyName, propertyPath ); + if ( log.isDebugEnabled() ) { + log.debug( "getDataType() : " + propertyPath + " -> " + propertyType ); + } + super.setDataType( propertyType ); + } + return super.getDataType(); + } + + public void setPropertyPath(String propertyPath) { + this.propertyPath = propertyPath; + } + + public String getPropertyPath() { + return propertyPath; + } + + public FromReferenceNode getLhs() { + FromReferenceNode lhs = ( ( FromReferenceNode ) getFirstChild() ); + if ( lhs == null ) { + throw new IllegalStateException( "DOT node with no left-hand-side!" ); + } + return lhs; + } + + /** + * Returns the full path of the node. + * + * @return the full path of the node. + */ + public String getPath() { + if ( path == null ) { + FromReferenceNode lhs = getLhs(); + if ( lhs == null ) { + path = getText(); + } + else { + SqlNode rhs = ( SqlNode ) lhs.getNextSibling(); + path = lhs.getPath() + "." + rhs.getOriginalText(); + } + } + return path; + } + + public void setFetch(boolean fetch) { + this.fetch = fetch; + } + + public void setScalarColumnText(int i) throws SemanticException { + String[] sqlColumns = getColumns(); + ColumnHelper.generateScalarColumns( this, sqlColumns, i ); + } + + /** + * Special method to resolve expressions in the SELECT list. + * + * @throws SemanticException if this cannot be resolved. + */ + public void resolveSelectExpression() throws SemanticException { + if ( getWalker().isShallowQuery() || getWalker().getCurrentFromClause().isSubQuery() ) { + resolve(false, true); + } + else { + resolve(true, false); + Type type = getDataType(); + if ( type.isEntityType() ) { + FromElement fromElement = getFromElement(); + fromElement.setIncludeSubclasses( true ); // Tell the destination fromElement to 'includeSubclasses'. + if ( useThetaStyleImplicitJoins ) { + fromElement.getJoinSequence().setUseThetaStyle( true ); // Use theta style (for regression) + // Move the node up, after the origin node. + FromElement origin = fromElement.getOrigin(); + if ( origin != null ) { + ASTUtil.makeSiblingOfParent( origin, fromElement ); + } + } + } + } + + FromReferenceNode lhs = getLhs(); + while ( lhs != null ) { + checkSubclassOrSuperclassPropertyReference( lhs, lhs.getNextSibling().getText() ); + lhs = ( FromReferenceNode ) lhs.getFirstChild(); + } + } + + public void setResolvedConstant(String text) { + path = text; + dereferenceType = DEREF_JAVA_CONSTANT; + setResolved(); // Don't resolve the node again. 
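+ // For example, a reference to a fully-qualified Java constant such as
+ // com.example.Color.RED (a hypothetical name) reaches this point already
+ // translated to its literal value, so the node is simply marked resolved.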
+ }
+
+ private boolean checkSubclassOrSuperclassPropertyReference(FromReferenceNode lhs, String propertyName) {
+ if ( lhs != null && !( lhs instanceof IndexNode ) ) {
+ final FromElement source = lhs.getFromElement();
+ if ( source != null ) {
+ source.handlePropertyBeingDereferenced( lhs.getDataType(), propertyName );
+ }
+ }
+ return false;
+ }
+}
diff --git a/src/org/hibernate/hql/ast/tree/ExpectedTypeAwareNode.java b/src/org/hibernate/hql/ast/tree/ExpectedTypeAwareNode.java
new file mode 100644
index 0000000000..ffa858fd2f
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/ExpectedTypeAwareNode.java
@@ -0,0 +1,14 @@
+package org.hibernate.hql.ast.tree;
+
+import org.hibernate.type.Type;
+
+/**
+ * Interface for nodes which wish to be made aware of any determined "expected
+ * type" based on the context within which they appear in the query.
+ *
+ * @author Steve Ebersole
+ */
+public interface ExpectedTypeAwareNode {
+ public void setExpectedType(Type expectedType);
+ public Type getExpectedType();
+}
diff --git a/src/org/hibernate/hql/ast/tree/FromClause.java b/src/org/hibernate/hql/ast/tree/FromClause.java
new file mode 100644
index 0000000000..9722ec4cda
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/FromClause.java
@@ -0,0 +1,364 @@
+// $Id$
+package org.hibernate.hql.ast.tree;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hibernate.hql.antlr.HqlSqlTokenTypes;
+import org.hibernate.hql.ast.util.ASTIterator;
+import org.hibernate.hql.ast.util.ASTUtil;
+
+import antlr.SemanticException;
+import antlr.collections.AST;
+
+/**
+ * Represents the 'FROM' part of a query or subquery, containing all mapped class references.
+ *
+ * @author josh
+ */
+public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, DisplayableNode {
+ private static Log log = LogFactory.getLog( FromClause.class );
+ public static final int ROOT_LEVEL = 1;
+
+ private int level = ROOT_LEVEL;
+ private Set fromElements = new HashSet();
+ private Map fromElementByClassAlias = new HashMap();
+ private Map fromElementByTableAlias = new HashMap();
+ private Map fromElementsByPath = new HashMap();
+
+ /**
+ * All of the implicit FROM xxx JOIN yyy elements that are the destination of a collection. These are created from
+ * index operators on collection property references.
+ */
+ private Map collectionJoinFromElementsByPath = new HashMap();
+ /**
+ * Pointer to the parent FROM clause, if there is one.
+ */
+ private FromClause parentFromClause;
+ /**
+ * Collection of FROM clauses of which this is the parent.
+ */
+ private Set childFromClauses;
+ /**
+ * Counts the from elements as they are added.
+ */
+ private int fromElementCounter = 0;
+ /**
+ * Implied FROM elements to add onto the end of the FROM clause.
+ */
+ private List impliedElements = new LinkedList();
+
+ /**
+ * Adds a new from element to the from node.
+ *
+ * @param path The reference to the class.
+ * @param alias The alias AST.
+ * @return FromElement - The new FROM element.
+ */
+ public FromElement addFromElement(String path, AST alias) throws SemanticException {
+ // The path may be a reference to an alias defined in the parent query.
+ String classAlias = ( alias == null ) ? null : alias.getText();
+ checkForDuplicateClassAlias( classAlias );
+ FromElementFactory factory = new FromElementFactory( this, null, path, classAlias, null, false );
+ return factory.addFromElement();
+ }
+
+ void registerFromElement(FromElement element) {
+ fromElements.add( element );
+ String classAlias = element.getClassAlias();
+ if ( classAlias != null ) {
+ // The HQL class alias refers to the class name.
+ fromElementByClassAlias.put( classAlias, element );
+ }
+ // Associate the table alias with the element.
+ String tableAlias = element.getTableAlias();
+ if ( tableAlias != null ) {
+ fromElementByTableAlias.put( tableAlias, element );
+ }
+ }
+
+ void addDuplicateAlias(String alias, FromElement element) {
+ fromElementByClassAlias.put( alias, element );
+ }
+
+ private void checkForDuplicateClassAlias(String classAlias) throws SemanticException {
+ if ( classAlias != null && fromElementByClassAlias.containsKey( classAlias ) ) {
+ throw new SemanticException( "Duplicate definition of alias '"
+ + classAlias + "'" );
+ }
+ }
+
+ /**
+ * Retrieves the from-element represented by the given alias.
+ *
+ * @param aliasOrClassName The alias by which to locate the from-element.
+ * @return The from-element assigned the given alias, or null if none.
+ */
+ public FromElement getFromElement(String aliasOrClassName) {
+ FromElement fromElement = ( FromElement ) fromElementByClassAlias.get( aliasOrClassName );
+ if ( fromElement == null && getSessionFactoryHelper().isStrictJPAQLComplianceEnabled() ) {
+ fromElement = findIntendedAliasedFromElementBasedOnCrazyJPARequirements( aliasOrClassName );
+ }
+ if ( fromElement == null && parentFromClause != null ) {
+ fromElement = parentFromClause.getFromElement( aliasOrClassName );
+ }
+ return fromElement;
+ }
+
+ private FromElement findIntendedAliasedFromElementBasedOnCrazyJPARequirements(String specifiedAlias) {
+ Iterator itr = fromElementByClassAlias.entrySet().iterator();
+ while ( itr.hasNext() ) {
+ Map.Entry entry = ( Map.Entry ) itr.next();
+ String alias = ( String ) entry.getKey();
+ if ( alias.equalsIgnoreCase( specifiedAlias ) ) {
+ return ( FromElement ) entry.getValue();
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Convenience method to check whether a given token represents a from-element alias.
+ *
+ * @param possibleAlias The potential from-element alias to check.
+ * @return True if the possibleAlias is an alias to a from-element visible
+ * from this point in the query graph.
+ */
+ public boolean isFromElementAlias(String possibleAlias) {
+ boolean isAlias = containsClassAlias( possibleAlias );
+ if ( !isAlias && parentFromClause != null ) {
+ // try the parent FromClause...
+ isAlias = parentFromClause.isFromElementAlias( possibleAlias );
+ }
+ return isAlias;
+ }
+
+ /**
+ * Returns the list of from elements in order.
+ *
+ * @return the list of from elements (instances of FromElement).
+ */
+ public List getFromElements() {
+ return ASTUtil.collectChildren( this, fromElementPredicate );
+ }
+
+ public FromElement getFromElement() {
+ // TODO: not sure about this one
+// List fromElements = getFromElements();
+// if ( fromElements == null || fromElements.isEmpty() ) {
+// throw new QueryException( "Unable to locate from element" );
+// }
+ return (FromElement) getFromElements().get(0);
+ }
+
+ /**
+ * Returns the list of from elements that will be part of the result set.
+ *
+ * @return the list of from elements that will be part of the result set.
+ */ + public List getProjectionList() { + return ASTUtil.collectChildren( this, projectionListPredicate ); + } + + public List getCollectionFetches() { + return ASTUtil.collectChildren( this, collectionFetchPredicate ); + } + + public boolean hasCollectionFecthes() { + return getCollectionFetches().size() > 0; + } + + public List getExplicitFromElements() { + return ASTUtil.collectChildren( this, explicitFromPredicate ); + } + + private static ASTUtil.FilterPredicate fromElementPredicate = new ASTUtil.IncludePredicate() { + public boolean include(AST node) { + FromElement fromElement = ( FromElement ) node; + return fromElement.isFromOrJoinFragment(); + } + }; + + private static ASTUtil.FilterPredicate projectionListPredicate = new ASTUtil.IncludePredicate() { + public boolean include(AST node) { + FromElement fromElement = ( FromElement ) node; + return fromElement.inProjectionList(); + } + }; + + private static ASTUtil.FilterPredicate collectionFetchPredicate = new ASTUtil.IncludePredicate() { + public boolean include(AST node) { + FromElement fromElement = ( FromElement ) node; + return fromElement.isFetch() && fromElement.getQueryableCollection() != null; + } + }; + + private static ASTUtil.FilterPredicate explicitFromPredicate = new ASTUtil.IncludePredicate() { + public boolean include(AST node) { + final FromElement fromElement = ( FromElement ) node; + return !fromElement.isImplied(); + } + }; + + FromElement findCollectionJoin(String path) { + return ( FromElement ) collectionJoinFromElementsByPath.get( path ); + } + + /** + * Look for an existing implicit or explicit join by the + * given path. + */ + FromElement findJoinByPath(String path) { + FromElement elem = findJoinByPathLocal( path ); + if ( elem == null && parentFromClause != null ) { + elem = parentFromClause.findJoinByPath( path ); + } + return elem; + } + + FromElement findJoinByPathLocal(String path) { + Map joinsByPath = fromElementsByPath; + return ( FromElement ) joinsByPath.get( path ); + } + + void addJoinByPathMap(String path, FromElement destination) { + if ( log.isDebugEnabled() ) { + log.debug( "addJoinByPathMap() : " + path + " -> " + destination ); + } + fromElementsByPath.put( path, destination ); + } + + /** + * Returns true if the from node contains the class alias name. + * + * @param alias The HQL class alias name. + * @return true if the from node contains the class alias name. + */ + public boolean containsClassAlias(String alias) { + boolean isAlias = fromElementByClassAlias.containsKey( alias ); + if ( !isAlias && getSessionFactoryHelper().isStrictJPAQLComplianceEnabled() ) { + isAlias = findIntendedAliasedFromElementBasedOnCrazyJPARequirements( alias ) != null; + } + return isAlias; + } + + /** + * Returns true if the from node contains the table alias name. + * + * @param alias The SQL table alias name. + * @return true if the from node contains the table alias name. 
+ */
+ public boolean containsTableAlias(String alias) {
+ return fromElementByTableAlias.keySet().contains( alias );
+ }
+
+ public String getDisplayText() {
+ return "FromClause{" +
+ "level=" + level +
+ ", fromElementCounter=" + fromElementCounter +
+ ", fromElements=" + fromElements.size() +
+ ", fromElementByClassAlias=" + fromElementByClassAlias.keySet() +
+ ", fromElementByTableAlias=" + fromElementByTableAlias.keySet() +
+ ", fromElementsByPath=" + fromElementsByPath.keySet() +
+ ", collectionJoinFromElementsByPath=" + collectionJoinFromElementsByPath.keySet() +
+ ", impliedElements=" + impliedElements +
+ "}";
+ }
+
+ public void setParentFromClause(FromClause parentFromClause) {
+ this.parentFromClause = parentFromClause;
+ if ( parentFromClause != null ) {
+ level = parentFromClause.getLevel() + 1;
+ parentFromClause.addChild( this );
+ }
+ }
+
+ private void addChild(FromClause fromClause) {
+ if ( childFromClauses == null ) {
+ childFromClauses = new HashSet();
+ }
+ childFromClauses.add( fromClause );
+ }
+
+ public FromClause locateChildFromClauseWithJoinByPath(String path) {
+ if ( childFromClauses != null && !childFromClauses.isEmpty() ) {
+ Iterator children = childFromClauses.iterator();
+ while ( children.hasNext() ) {
+ FromClause child = ( FromClause ) children.next();
+ if ( child.findJoinByPathLocal( path ) != null ) {
+ return child;
+ }
+ }
+ }
+ return null;
+ }
+
+ public void promoteJoin(FromElement elem) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Promoting [" + elem + "] to [" + this + "]" );
+ }
+ //TODO: implement functionality
+ // this might be painful to do here, as the "join post processing" for
+ // the subquery has already been performed (meaning that for
+ // theta-join dialects, the join conditions have already been moved
+ // over to the where clause). A "simple" solution here might be to
+ // perform "join post processing" once for the entire query (including
+ // any subqueries) at one fell swoop
+ }
+
+ public boolean isSubQuery() {
+ // TODO : this is broken for subqueries in statements other than selects...
+ return parentFromClause != null;
+ }
+
+ void addCollectionJoinFromElementByPath(String path, FromElement destination) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "addCollectionJoinFromElementByPath() : " + path + " -> " + destination );
+ }
+ collectionJoinFromElementsByPath.put( path, destination ); // Add the new node to the map so that we don't create it twice.
+ }
+
+ public FromClause getParentFromClause() {
+ return parentFromClause;
+ }
+
+ public int getLevel() {
+ return level;
+ }
+
+ public int nextFromElementCounter() {
+ return fromElementCounter++;
+ }
+
+ public void resolve() {
+ // Make sure that all from elements registered with this FROM clause are actually in the AST.
+ ASTIterator iter = new ASTIterator( this.getFirstChild() ); + Set childrenInTree = new HashSet(); + while ( iter.hasNext() ) { + childrenInTree.add( iter.next() ); + } + for ( Iterator iterator = fromElements.iterator(); iterator.hasNext(); ) { + FromElement fromElement = ( FromElement ) iterator.next(); + if ( !childrenInTree.contains( fromElement ) ) { + throw new IllegalStateException( "Element not in AST: " + fromElement ); + } + } + } + + public void addImpliedFromElement(FromElement element) { + impliedElements.add( element ); + } + + public String toString() { + return "FromClause{" + + "level=" + level + + "}"; + } +} diff --git a/src/org/hibernate/hql/ast/tree/FromElement.java b/src/org/hibernate/hql/ast/tree/FromElement.java new file mode 100644 index 0000000000..852d26cc0d --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/FromElement.java @@ -0,0 +1,551 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import java.util.LinkedList; +import java.util.List; + +import org.hibernate.QueryException; +import org.hibernate.engine.JoinSequence; +import org.hibernate.hql.QueryTranslator; +import org.hibernate.hql.CollectionProperties; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; +import org.hibernate.hql.ast.HqlSqlWalker; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.PropertyMapping; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; +import org.hibernate.util.StringHelper; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Represents a single mapped class mentioned in an HQL FROM clause. Each + * class reference will have the following symbols: + *
+ * <ul>
+ * <li>A class name - This is the name of the Java class that is mapped by Hibernate.</li>
+ * <li>[optional] an HQL alias for the mapped class.</li>
+ * <li>A table name - The name of the table that is mapped to the Java class.</li>
+ * <li>A table alias - The alias for the table that will be used in the resulting SQL.</li>
+ * </ul>
+ *
    + * User: josh
    + * Date: Dec 6, 2003
    + * Time: 10:28:17 AM
    + */ +public class FromElement extends HqlSqlWalkerNode implements DisplayableNode { + private static final Log log = LogFactory.getLog( FromElement.class ); + + private String className; + private String classAlias; + private String tableAlias; + private String collectionTableAlias; + private FromClause fromClause; + private boolean includeSubclasses = true; + private boolean collectionJoin = false; + private FromElement origin; + private String[] columns; + private String role; + private boolean fetch; + private boolean isAllPropertyFetch; + private boolean filter = false; + private int sequence = -1; + private boolean useFromFragment = false; + private boolean initialized = false; + private FromElementType elementType; + private boolean useWhereFragment = true; + private List destinations = new LinkedList(); + private boolean manyToMany = false; + private String withClauseFragment = null; + private String withClauseJoinAlias; + private boolean dereferencedBySuperclassProperty; + private boolean dereferencedBySubclassProperty; + + public FromElement() { + } + + public String getCollectionSuffix() { + return elementType.getCollectionSuffix(); + } + + public void setCollectionSuffix(String suffix) { + elementType.setCollectionSuffix(suffix); + } + + public void initializeCollection(FromClause fromClause, String classAlias, String tableAlias) { + doInitialize( fromClause, tableAlias, null, classAlias, null, null ); + initialized = true; + } + + public void initializeEntity( + FromClause fromClause, + String className, + EntityPersister persister, + EntityType type, + String classAlias, + String tableAlias) { + doInitialize( fromClause, tableAlias, className, classAlias, persister, type ); + this.sequence = fromClause.nextFromElementCounter(); + initialized = true; + } + + private void doInitialize(FromClause fromClause, String tableAlias, String className, String classAlias, + EntityPersister persister, EntityType type) { + if ( initialized ) { + throw new IllegalStateException( "Already initialized!!" ); + } + this.fromClause = fromClause; + this.tableAlias = tableAlias; + this.className = className; + this.classAlias = classAlias; + this.elementType = new FromElementType( this, persister, type ); + // Register the FromElement with the FROM clause, now that we have the names and aliases. + fromClause.registerFromElement( this ); + if ( log.isDebugEnabled() ) { + log.debug( fromClause + " : " + className + " (" + + ( classAlias == null ? "no alias" : classAlias ) + ") -> " + tableAlias ); + } + } + + public EntityPersister getEntityPersister() { + return elementType.getEntityPersister(); + } + + public Type getDataType() { + return elementType.getDataType(); + } + + public Type getSelectType() { + return elementType.getSelectType(); + } + + public Queryable getQueryable() { + return elementType.getQueryable(); + } + + public String getClassName() { + return className; + } + + public String getClassAlias() { + return classAlias; + //return classAlias == null ? className : classAlias; + } + + private String getTableName() { + Queryable queryable = getQueryable(); + return ( queryable != null ) ? queryable.getTableName() : "{none}"; + } + + public String getTableAlias() { + return tableAlias; + } + + /** + * Render the identifier select, but in a 'scalar' context (i.e. generate the column alias). + * + * @param i the sequence of the returned type + * @return the identifier select with the column alias. 
+ */ + String renderScalarIdentifierSelect(int i) { + return elementType.renderScalarIdentifierSelect( i ); + } + + void checkInitialized() { + if ( !initialized ) { + throw new IllegalStateException( "FromElement has not been initialized!" ); + } + } + + /** + * Returns the identifier select SQL fragment. + * + * @param size The total number of returned types. + * @param k The sequence of the current returned type. + * @return the identifier select SQL fragment. + */ + String renderIdentifierSelect(int size, int k) { + return elementType.renderIdentifierSelect( size, k ); + } + + /** + * Returns the property select SQL fragment. + * + * @param size The total number of returned types. + * @param k The sequence of the current returned type. + * @return the property select SQL fragment. + */ + String renderPropertySelect(int size, int k) { + return elementType.renderPropertySelect( size, k, isAllPropertyFetch ); + } + + String renderCollectionSelectFragment(int size, int k) { + return elementType.renderCollectionSelectFragment( size, k ); + } + + String renderValueCollectionSelectFragment(int size, int k) { + return elementType.renderValueCollectionSelectFragment( size, k ); + } + + public FromClause getFromClause() { + return fromClause; + } + + /** + * Returns true if this FromElement was implied by a path, or false if this FROM element is explicitly declared in + * the FROM clause. + * + * @return true if this FromElement was implied by a path, or false if this FROM element is explicitly declared + */ + public boolean isImplied() { + return false; // This is an explicit FROM element. + } + + /** + * Returns additional display text for the AST node. + * + * @return String - The additional display text. + */ + public String getDisplayText() { + StringBuffer buf = new StringBuffer(); + buf.append( "FromElement{" ); + appendDisplayText( buf ); + buf.append( "}" ); + return buf.toString(); + } + + protected void appendDisplayText(StringBuffer buf) { + buf.append( isImplied() ? ( + isImpliedInFromClause() ? "implied in FROM clause" : "implied" ) + : "explicit" ); + buf.append( "," ).append( isCollectionJoin() ? "collection join" : "not a collection join" ); + buf.append( "," ).append( fetch ? "fetch join" : "not a fetch join" ); + buf.append( "," ).append( isAllPropertyFetch ? "fetch all properties" : "fetch non-lazy properties" ); + buf.append( ",classAlias=" ).append( getClassAlias() ); + buf.append( ",role=" ).append( role ); + buf.append( ",tableName=" ).append( getTableName() ); + buf.append( ",tableAlias=" ).append( getTableAlias() ); + FromElement origin = getRealOrigin(); + buf.append( ",origin=" ).append( origin == null ? 
"null" : origin.getText() ); + buf.append( ",colums={" ); + if ( columns != null ) { + for ( int i = 0; i < columns.length; i++ ) { + buf.append( columns[i] ); + if ( i < columns.length ) { + buf.append( " " ); + } + } + } + buf.append( ",className=" ).append( className ); + buf.append( "}" ); + } + + public int hashCode() { + return super.hashCode(); + } + + public boolean equals(Object obj) { + return super.equals( obj ); + } + + + public void setJoinSequence(JoinSequence joinSequence) { + elementType.setJoinSequence( joinSequence ); + } + + public JoinSequence getJoinSequence() { + return elementType.getJoinSequence(); + } + + public void setIncludeSubclasses(boolean includeSubclasses) { + if ( isDereferencedBySuperclassOrSubclassProperty() ) { + if ( !includeSubclasses && log.isTraceEnabled() ) { + log.trace( "attempt to disable subclass-inclusions", new Exception( "stack-trace source" ) ); + } + } + this.includeSubclasses = includeSubclasses; + } + + public boolean isIncludeSubclasses() { + return includeSubclasses; + } + + public boolean isDereferencedBySuperclassOrSubclassProperty() { + return dereferencedBySubclassProperty || dereferencedBySuperclassProperty; + } + + public String getIdentityColumn() { + checkInitialized(); + String table = getTableAlias(); + if ( table == null ) { + throw new IllegalStateException( "No table alias for node " + this ); + } + String[] cols; + String propertyName; + if ( getEntityPersister() != null && getEntityPersister().getEntityMetamodel() != null + && getEntityPersister().getEntityMetamodel().hasNonIdentifierPropertyNamedId() ) { + propertyName = getEntityPersister().getIdentifierPropertyName(); + } + else { + propertyName = EntityPersister.ENTITY_ID; + } + if ( getWalker().getStatementType() == HqlSqlWalker.SELECT ) { + cols = getPropertyMapping( propertyName ).toColumns( table, propertyName ); + } + else { + cols = getPropertyMapping( propertyName ).toColumns( propertyName ); + } + String result = StringHelper.join( ", ", cols ); + return cols.length == 1 ? result : "(" + result + ")"; + } + + public void setCollectionJoin(boolean collectionJoin) { + this.collectionJoin = collectionJoin; + } + + public boolean isCollectionJoin() { + return collectionJoin; + } + + public void setRole(String role) { + this.role = role; + } + + public void setQueryableCollection(QueryableCollection queryableCollection) { + elementType.setQueryableCollection( queryableCollection ); + } + + public QueryableCollection getQueryableCollection() { + return elementType.getQueryableCollection(); + } + + public void setColumns(String[] columns) { + this.columns = columns; + } + + public void setOrigin(FromElement origin, boolean manyToMany) { + this.origin = origin; + this.manyToMany = manyToMany; + origin.addDestination( this ); + if ( origin.getFromClause() == this.getFromClause() ) { + // TODO: Figure out a better way to get the FROM elements in a proper tree structure. + // If this is not the destination of a many-to-many, add it as a child of the origin. + if ( manyToMany ) { + ASTUtil.appendSibling( origin, this ); + } + else { + if ( !getWalker().isInFrom() && !getWalker().isInSelect() ) { + getFromClause().addChild( this ); + } + else { + origin.addChild( this ); + } + } + } + else if ( !getWalker().isInFrom() ) { + // HHH-276 : implied joins in a subselect where clause - The destination needs to be added + // to the destination's from clause. + getFromClause().addChild( this ); // Not sure if this is will fix everything, but it works. 
+ }
+ else {
+ // Otherwise, the destination node was implied by the FROM clause and the FROM clause processor
+ // will automatically add it in the right place.
+ }
+ }
+
+ public boolean isManyToMany() {
+ return manyToMany;
+ }
+
+ private void addDestination(FromElement fromElement) {
+ destinations.add( fromElement );
+ }
+
+ public List getDestinations() {
+ return destinations;
+ }
+
+ public FromElement getOrigin() {
+ return origin;
+ }
+
+ public FromElement getRealOrigin() {
+ if ( origin == null ) {
+ return null;
+ }
+ if ( origin.getText() == null || "".equals( origin.getText() ) ) {
+ return origin.getRealOrigin();
+ }
+ return origin;
+ }
+
+ public Type getPropertyType(String propertyName, String propertyPath) {
+ return elementType.getPropertyType( propertyName, propertyPath );
+ }
+
+ public String[] toColumns(String tableAlias, String path, boolean inSelect) {
+ return elementType.toColumns( tableAlias, path, inSelect );
+ }
+
+ public String[] toColumns(String tableAlias, String path, boolean inSelect, boolean forceAlias) {
+ return elementType.toColumns( tableAlias, path, inSelect, forceAlias );
+ }
+
+ public PropertyMapping getPropertyMapping(String propertyName) {
+ return elementType.getPropertyMapping( propertyName );
+ }
+
+ public void setFetch(boolean fetch) {
+ this.fetch = fetch;
+ // Fetch can't be used with scroll() or iterate().
+ if ( fetch && getWalker().isShallowQuery() ) {
+ throw new QueryException( QueryTranslator.ERROR_CANNOT_FETCH_WITH_ITERATE );
+ }
+ }
+
+ public boolean isFetch() {
+ return fetch;
+ }
+
+ public int getSequence() {
+ return sequence;
+ }
+
+ public void setFilter(boolean b) {
+ filter = b;
+ }
+
+ public boolean isFilter() {
+ return filter;
+ }
+
+ public boolean useFromFragment() {
+ checkInitialized();
+ // If it's not implied or it is implied and it's a many to many join where the target wasn't found.
+ return !isImplied() || this.useFromFragment;
+ }
+
+ public void setUseFromFragment(boolean useFromFragment) {
+ this.useFromFragment = useFromFragment;
+ }
+
+ public boolean useWhereFragment() {
+ return useWhereFragment;
+ }
+
+ public void setUseWhereFragment(boolean b) {
+ useWhereFragment = b;
+ }
+
+
+ public void setCollectionTableAlias(String collectionTableAlias) {
+ this.collectionTableAlias = collectionTableAlias;
+ }
+
+ public String getCollectionTableAlias() {
+ return collectionTableAlias;
+ }
+
+ public boolean isCollectionOfValuesOrComponents() {
+ return elementType.isCollectionOfValuesOrComponents();
+ }
+
+ public boolean isEntity() {
+ return elementType.isEntity();
+ }
+
+ public void setImpliedInFromClause(boolean flag) {
+ throw new UnsupportedOperationException( "Explicit FROM elements can't be implied in the FROM clause!" );
+ }
+
+ public boolean isImpliedInFromClause() {
+ return false; // Since this is an explicit FROM element, it can't be implied in the FROM clause.
+ }
+
+ public void setInProjectionList(boolean inProjectionList) {
+ // Do nothing, explicit from elements are *always* in the projection list.
+ } + + public boolean inProjectionList() { + return !isImplied() && isFromOrJoinFragment(); + } + + public boolean isFromOrJoinFragment() { + return getType() == SqlTokenTypes.FROM_FRAGMENT || getType() == SqlTokenTypes.JOIN_FRAGMENT; + } + + public boolean isAllPropertyFetch() { + return isAllPropertyFetch; + } + + public void setAllPropertyFetch(boolean fetch) { + isAllPropertyFetch = fetch; + } + + public String getWithClauseFragment() { + return withClauseFragment; + } + + public String getWithClauseJoinAlias() { + return withClauseJoinAlias; + } + + public void setWithClauseFragment(String withClauseJoinAlias, String withClauseFragment) { + this.withClauseJoinAlias = withClauseJoinAlias; + this.withClauseFragment = withClauseFragment; + } + + public boolean hasCacheablePersister() { + if ( getQueryableCollection() != null ) { + return getQueryableCollection().hasCache(); + } + else { + return getQueryable().hasCache(); + } + } + + public void handlePropertyBeingDereferenced(Type propertySource, String propertyName) { + if ( getQueryableCollection() != null && CollectionProperties.isCollectionProperty( propertyName ) ) { + // propertyName refers to something like collection.size... + return; + } + if ( propertySource.isComponentType() ) { + // property name is a sub-path of a component... + return; + } + + Queryable persister = getQueryable(); + if ( persister != null ) { + try { + Queryable.Declarer propertyDeclarer = persister.getSubclassPropertyDeclarer( propertyName ); + if ( log.isTraceEnabled() ) { + log.trace( "handling property dereference [" + persister.getEntityName() + " (" + getClassAlias() + ") -> " + propertyName + " (" + propertyDeclarer + ")]" ); + } + if ( propertyDeclarer == Queryable.Declarer.SUBCLASS ) { + dereferencedBySubclassProperty = true; + includeSubclasses = true; + } + else if ( propertyDeclarer == Queryable.Declarer.SUPERCLASS ) { + dereferencedBySuperclassProperty = true; + } + } + catch( QueryException ignore ) { + // ignore it; the incoming property could not be found so we + // cannot be sure what to do here. At the very least, the + // safest is to simply not apply any dereference toggling... 
+ + } + } + } + + public boolean isDereferencedBySuperclassProperty() { + return dereferencedBySuperclassProperty; + } + + public boolean isDereferencedBySubclassProperty() { + return dereferencedBySubclassProperty; + } +} diff --git a/src/org/hibernate/hql/ast/tree/FromElementFactory.java b/src/org/hibernate/hql/ast/tree/FromElementFactory.java new file mode 100644 index 0000000000..601a03c0c2 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/FromElementFactory.java @@ -0,0 +1,500 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.engine.JoinSequence; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; +import org.hibernate.hql.ast.util.AliasGenerator; +import org.hibernate.hql.ast.util.PathHelper; +import org.hibernate.hql.ast.util.SessionFactoryHelper; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.Joinable; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.JoinFragment; +import org.hibernate.type.AssociationType; +import org.hibernate.type.CollectionType; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; +import org.hibernate.util.StringHelper; + +import antlr.ASTFactory; +import antlr.SemanticException; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Encapsulates the creation of FromElements and JoinSequences. + * + * @author josh Oct 12, 2004 4:54:25 AM + */ +class FromElementFactory implements SqlTokenTypes { + + private static final Log log = LogFactory.getLog( FromElementFactory.class ); + + private FromClause fromClause; + private FromElement origin; + private String path; + + private String classAlias; + private String[] columns; + private boolean implied; + private boolean inElementsFunction; + private boolean collection; + private QueryableCollection queryableCollection; + private CollectionType collectionType; + + /** + * Creates entity from elements. + */ + public FromElementFactory(FromClause fromClause, FromElement origin, String path) { + this.fromClause = fromClause; + this.origin = origin; + this.path = path; + collection = false; + } + + /** + * Creates collection from elements. + */ + public FromElementFactory( + FromClause fromClause, + FromElement origin, + String path, + String classAlias, + String[] columns, + boolean implied) { + this( fromClause, origin, path ); + this.classAlias = classAlias; + this.columns = columns; + this.implied = implied; + collection = true; + } + + FromElement addFromElement() throws SemanticException { + FromClause parentFromClause = fromClause.getParentFromClause(); + if ( parentFromClause != null ) { + // Look up class name using the first identifier in the path. + String pathAlias = PathHelper.getAlias( path ); + FromElement parentFromElement = parentFromClause.getFromElement( pathAlias ); + if ( parentFromElement != null ) { + return createFromElementInSubselect( path, pathAlias, parentFromElement, classAlias ); + } + } + + EntityPersister entityPersister = fromClause.getSessionFactoryHelper().requireClassPersister( path ); + + FromElement elem = createAndAddFromElement( path, + classAlias, + entityPersister, + ( EntityType ) ( ( Queryable ) entityPersister ).getType(), + null ); + + // Add to the query spaces. 
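+ // (Query spaces name the tables a query depends on; Hibernate uses them to
+ // decide when an auto-flush is required and when cached query results are stale.)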
+ fromClause.getWalker().addQuerySpaces( entityPersister.getQuerySpaces() );
+
+ return elem;
+ }
+
+ private FromElement createFromElementInSubselect(
+ String path,
+ String pathAlias,
+ FromElement parentFromElement,
+ String classAlias) throws SemanticException {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "createFromElementInSubselect() : path = " + path );
+ }
+ // Create a DotNode AST for the path and resolve it.
+ FromElement fromElement = evaluateFromElementPath( path, classAlias );
+ EntityPersister entityPersister = fromElement.getEntityPersister();
+
+ // If the first identifier in the path refers to the class alias (not the class name), then this
+ // is a correlated subselect. If it's a correlated sub-select, use the existing table alias. Otherwise
+ // generate a new one.
+ String tableAlias = null;
+ boolean correlatedSubselect = pathAlias.equals( parentFromElement.getClassAlias() );
+ if ( correlatedSubselect ) {
+ tableAlias = fromElement.getTableAlias();
+ }
+ else {
+ tableAlias = null;
+ }
+
+ // If the from element isn't in the same clause, create a new from element.
+ if ( fromElement.getFromClause() != fromClause ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "createFromElementInSubselect() : creating a new FROM element..." );
+ }
+ fromElement = createFromElement( entityPersister );
+ initializeAndAddFromElement( fromElement,
+ path,
+ classAlias,
+ entityPersister,
+ ( EntityType ) ( ( Queryable ) entityPersister ).getType(),
+ tableAlias
+ );
+ }
+ if ( log.isDebugEnabled() ) {
+ log.debug( "createFromElementInSubselect() : " + path + " -> " + fromElement );
+ }
+ return fromElement;
+ }
+
+ private FromElement evaluateFromElementPath(String path, String classAlias) throws SemanticException {
+ ASTFactory factory = fromClause.getASTFactory();
+ FromReferenceNode pathNode = ( FromReferenceNode ) PathHelper.parsePath( path, factory );
+ pathNode.recursiveResolve( FromReferenceNode.ROOT_LEVEL, // This is the root level node.
+ false, // Generate an explicit from clause at the root.
+ classAlias,
+ null
+ );
+ if ( pathNode.getImpliedJoin() != null ) {
+ return pathNode.getImpliedJoin();
+ }
+ else {
+ return pathNode.getFromElement();
+ }
+ }
+
+ FromElement createCollectionElementsJoin(
+ QueryableCollection queryableCollection,
+ String collectionName) throws SemanticException {
+ JoinSequence collectionJoinSequence = fromClause.getSessionFactoryHelper()
+ .createCollectionJoinSequence( queryableCollection, collectionName );
+ this.queryableCollection = queryableCollection;
+ return createCollectionJoin( collectionJoinSequence, null );
+ }
+
+ FromElement createCollection(
+ QueryableCollection queryableCollection,
+ String role,
+ int joinType,
+ boolean fetchFlag,
+ boolean indexed)
+ throws SemanticException {
+ if ( !collection ) {
+ throw new IllegalStateException( "FromElementFactory not initialized for collections!" );
+ }
+ this.inElementsFunction = indexed;
+ FromElement elem;
+ this.queryableCollection = queryableCollection;
+ collectionType = queryableCollection.getCollectionType();
+ String roleAlias = fromClause.getAliasGenerator().createName( role );
+
+ // Correlated subqueries create 'special' implied from nodes
+ // because correlated subselects can't use an ANSI-style join
+ boolean explicitSubqueryFromElement = fromClause.isSubQuery() && !implied;
+ if ( explicitSubqueryFromElement ) {
+ String pathRoot = StringHelper.root( path );
+ FromElement origin = fromClause.getFromElement( pathRoot );
+ if ( origin == null || origin.getFromClause() != fromClause ) {
+ implied = true;
+ }
+ }
+
+ // super-duper-classic-parser-regression-testing-mojo-magic...
+ if ( explicitSubqueryFromElement && DotNode.useThetaStyleImplicitJoins ) {
+ implied = true;
+ }
+
+ Type elementType = queryableCollection.getElementType();
+ if ( elementType.isEntityType() ) { // A collection of entities...
+ elem = createEntityAssociation( role, roleAlias, joinType );
+ }
+ else if ( elementType.isComponentType() ) { // A collection of components...
+ JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
+ elem = createCollectionJoin( joinSequence, roleAlias );
+ }
+ else { // A collection of scalar elements...
+ JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
+ elem = createCollectionJoin( joinSequence, roleAlias );
+ }
+
+ elem.setRole( role );
+ elem.setQueryableCollection( queryableCollection );
+ // Don't include sub-classes for implied collection joins or subquery joins.
+ if ( implied ) {
+ elem.setIncludeSubclasses( false );
+ }
+
+ if ( explicitSubqueryFromElement ) {
+ elem.setInProjectionList( true ); // Treat explicit from elements in sub-queries properly.
+ }
+
+ if ( fetchFlag ) {
+ elem.setFetch( true );
+ }
+ return elem;
+ }
+
+ FromElement createEntityJoin(
+ String entityClass,
+ String tableAlias,
+ JoinSequence joinSequence,
+ boolean fetchFlag,
+ boolean inFrom,
+ EntityType type) throws SemanticException {
+ FromElement elem = createJoin( entityClass, tableAlias, joinSequence, type, false );
+ elem.setFetch( fetchFlag );
+ EntityPersister entityPersister = elem.getEntityPersister();
+ int numberOfTables = entityPersister.getQuerySpaces().length;
+ if ( numberOfTables > 1 && implied && !elem.useFromFragment() ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "createEntityJoin() : Implied multi-table entity join" );
+ }
+ elem.setUseFromFragment( true );
+ }
+
+ // If this is an implied join in a FROM clause, then use ANSI-style joining, and set the
+ // flag on the FromElement that indicates that it was implied in the FROM clause itself.
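+ // (e.g. the intermediate "customer" hop implied by a dotted join path written
+ // directly in the FROM clause, as in "join o.customer.address a"; an
+ // illustrative path, not from the original source)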
+		if ( implied && inFrom ) {
+			joinSequence.setUseThetaStyle( false );
+			elem.setUseFromFragment( true );
+			elem.setImpliedInFromClause( true );
+		}
+		if ( elem.getWalker().isSubQuery() ) {
+			// two conditions where we need to transform this to a theta-join syntax:
+			//      1) 'elem' is the "root from-element" in correlated subqueries
+			//      2) The DotNode.useThetaStyleImplicitJoins has been set to true
+			//          and 'elem' represents an implicit join
+			if ( elem.getFromClause() != elem.getOrigin().getFromClause() ||
+//					( implied && DotNode.useThetaStyleImplicitJoins ) ) {
+					DotNode.useThetaStyleImplicitJoins ) {
+				// the "root from-element" in correlated subqueries does need this piece
+				elem.setType( FROM_FRAGMENT );
+				joinSequence.setUseThetaStyle( true );
+				elem.setUseFromFragment( false );
+			}
+		}
+
+		return elem;
+	}
+
+	FromElement createElementJoin(QueryableCollection queryableCollection) throws SemanticException {
+		FromElement elem;
+
+		implied = true;	//TODO: always true for now, but not if we later decide to support elements() in the from clause
+		inElementsFunction = true;
+		Type elementType = queryableCollection.getElementType();
+		if ( !elementType.isEntityType() ) {
+			throw new IllegalArgumentException( "Cannot create element join for a collection of non-entities!" );
+		}
+		this.queryableCollection = queryableCollection;
+		SessionFactoryHelper sfh = fromClause.getSessionFactoryHelper();
+		EntityPersister entityPersister = queryableCollection.getElementPersister();
+		String tableAlias = fromClause.getAliasGenerator().createName( entityPersister.getEntityName() );
+		String associatedEntityName = entityPersister.getEntityName();
+		EntityPersister targetEntityPersister = sfh.requireClassPersister( associatedEntityName );
+		// Create the FROM element for the target (the elements of the collection).
+		FromElement destination = createAndAddFromElement(
+				associatedEntityName,
+				classAlias,
+				targetEntityPersister,
+				( EntityType ) queryableCollection.getElementType(),
+				tableAlias
+		);
+		// If the join is implied, then don't include sub-classes on the element.
+		if ( implied ) {
+			destination.setIncludeSubclasses( false );
+		}
+		fromClause.addCollectionJoinFromElementByPath( path, destination );
+//		origin.addDestination(destination);
+		// Add the query spaces.
+		fromClause.getWalker().addQuerySpaces( entityPersister.getQuerySpaces() );
+
+		CollectionType type = queryableCollection.getCollectionType();
+		String role = type.getRole();
+		String roleAlias = origin.getTableAlias();
+
+		String[] targetColumns = sfh.getCollectionElementColumns( role, roleAlias );
+		AssociationType elementAssociationType = sfh.getElementAssociationType( type );
+
+		// Create the join element under the from element.
+		int joinType = JoinFragment.INNER_JOIN;
+		JoinSequence joinSequence = sfh.createJoinSequence( implied, elementAssociationType, tableAlias, joinType, targetColumns );
+		elem = initializeJoin( path, destination, joinSequence, targetColumns, origin, false );
+		elem.setUseFromFragment( true );	// The associated entity is implied, but it must be included in the FROM.
+		elem.setCollectionTableAlias( roleAlias );	// The collection alias is the role.
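+		// Illustrative: dereferencing an indexed element of a many-to-many collection, e.g. a
+		// path like "order.items[2].name", lands here (via IndexNode.prepareForDot), joining the
+		// element entity's table into the query alongside the collection (association) table.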
+		return elem;
+	}
+
+	private FromElement createCollectionJoin(JoinSequence collectionJoinSequence, String tableAlias) throws SemanticException {
+		String text = queryableCollection.getTableName();
+		AST ast = createFromElement( text );
+		FromElement destination = ( FromElement ) ast;
+		Type elementType = queryableCollection.getElementType();
+		if ( elementType.isCollectionType() ) {
+			throw new SemanticException( "Collections of collections are not supported!" );
+		}
+		destination.initializeCollection( fromClause, classAlias, tableAlias );
+		destination.setType( JOIN_FRAGMENT );		// Tag this node as a JOIN.
+		destination.setIncludeSubclasses( false );	// Don't include subclasses in the join.
+		destination.setCollectionJoin( true );		// This is a collection join.
+		destination.setJoinSequence( collectionJoinSequence );
+		destination.setOrigin( origin, false );
+		destination.setCollectionTableAlias( tableAlias );
+//		origin.addDestination( destination );
+// This was the cause of HHH-242
+//		origin.setType( FROM_FRAGMENT );			// Set the parent node type so that the AST is properly formed.
+		origin.setText( "" );						// The destination node will have all the FROM text.
+		origin.setCollectionJoin( true );			// The parent node is a collection join too (voodoo - see JoinProcessor)
+		fromClause.addCollectionJoinFromElementByPath( path, destination );
+		fromClause.getWalker().addQuerySpaces( queryableCollection.getCollectionSpaces() );
+		return destination;
+	}
+
+	private FromElement createEntityAssociation(
+			String role,
+			String roleAlias,
+			int joinType) throws SemanticException {
+		FromElement elem;
+		Queryable entityPersister = ( Queryable ) queryableCollection.getElementPersister();
+		String associatedEntityName = entityPersister.getEntityName();
+		// Get the class name of the associated entity.
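+		// Illustrative sketch: a one-to-many collection joins the owner's table directly to the
+		// element entity's table, while a many-to-many collection must also pass through the
+		// intermediate association table (see createManyToMany below).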
+ if ( queryableCollection.isOneToMany() ) { + if ( log.isDebugEnabled() ) { + log.debug( "createEntityAssociation() : One to many - path = " + path + " role = " + role + " associatedEntityName = " + associatedEntityName ); + } + JoinSequence joinSequence = createJoinSequence( roleAlias, joinType ); + + elem = createJoin( associatedEntityName, roleAlias, joinSequence, ( EntityType ) queryableCollection.getElementType(), false ); + } + else { + if ( log.isDebugEnabled() ) { + log.debug( "createManyToMany() : path = " + path + " role = " + role + " associatedEntityName = " + associatedEntityName ); + } + elem = createManyToMany( role, associatedEntityName, + roleAlias, entityPersister, ( EntityType ) queryableCollection.getElementType(), joinType ); + fromClause.getWalker().addQuerySpaces( queryableCollection.getCollectionSpaces() ); + } + elem.setCollectionTableAlias( roleAlias ); + return elem; + } + + private FromElement createJoin( + String entityClass, + String tableAlias, + JoinSequence joinSequence, + EntityType type, + boolean manyToMany) throws SemanticException { + // origin, path, implied, columns, classAlias, + EntityPersister entityPersister = fromClause.getSessionFactoryHelper().requireClassPersister( entityClass ); + FromElement destination = createAndAddFromElement( entityClass, + classAlias, + entityPersister, + type, + tableAlias ); + return initializeJoin( path, destination, joinSequence, getColumns(), origin, manyToMany ); + } + + private FromElement createManyToMany( + String role, + String associatedEntityName, + String roleAlias, + Queryable entityPersister, + EntityType type, + int joinType) throws SemanticException { + FromElement elem; + SessionFactoryHelper sfh = fromClause.getSessionFactoryHelper(); + if ( inElementsFunction /*implied*/ ) { + // For implied many-to-many, just add the end join. + JoinSequence joinSequence = createJoinSequence( roleAlias, joinType ); + elem = createJoin( associatedEntityName, roleAlias, joinSequence, type, true ); + } + else { + // For an explicit many-to-many relationship, add a second join from the intermediate + // (many-to-many) table to the destination table. Also, make sure that the from element's + // idea of the destination is the destination table. + String tableAlias = fromClause.getAliasGenerator().createName( entityPersister.getEntityName() ); + String[] secondJoinColumns = sfh.getCollectionElementColumns( role, roleAlias ); + // Add the second join, the one that ends in the destination table. + JoinSequence joinSequence = createJoinSequence( roleAlias, joinType ); + joinSequence.addJoin( sfh.getElementAssociationType( collectionType ), tableAlias, joinType, secondJoinColumns ); + elem = createJoin( associatedEntityName, tableAlias, joinSequence, type, false ); + elem.setUseFromFragment( true ); + } + return elem; + } + + private JoinSequence createJoinSequence(String roleAlias, int joinType) { + SessionFactoryHelper sessionFactoryHelper = fromClause.getSessionFactoryHelper(); + String[] joinColumns = getColumns(); + if ( collectionType == null ) { + throw new IllegalStateException( "collectionType is null!" 
);
+		}
+		return sessionFactoryHelper.createJoinSequence( implied, collectionType, roleAlias, joinType, joinColumns );
+	}
+
+	private FromElement createAndAddFromElement(
+			String className,
+			String classAlias,
+			EntityPersister entityPersister,
+			EntityType type,
+			String tableAlias) {
+		if ( !( entityPersister instanceof Joinable ) ) {
+			throw new IllegalArgumentException( "EntityPersister " + entityPersister + " does not implement Joinable!" );
+		}
+		FromElement element = createFromElement( entityPersister );
+		initializeAndAddFromElement( element, className, classAlias, entityPersister, type, tableAlias );
+		return element;
+	}
+
+	private void initializeAndAddFromElement(
+			FromElement element,
+			String className,
+			String classAlias,
+			EntityPersister entityPersister,
+			EntityType type,
+			String tableAlias) {
+		if ( tableAlias == null ) {
+			AliasGenerator aliasGenerator = fromClause.getAliasGenerator();
+			tableAlias = aliasGenerator.createName( entityPersister.getEntityName() );
+		}
+		element.initializeEntity( fromClause, className, entityPersister, type, classAlias, tableAlias );
+	}
+
+	private FromElement createFromElement(EntityPersister entityPersister) {
+		Joinable joinable = ( Joinable ) entityPersister;
+		String text = joinable.getTableName();
+		AST ast = createFromElement( text );
+		FromElement element = ( FromElement ) ast;
+		return element;
+	}
+
+	private AST createFromElement(String text) {
+		AST ast = ASTUtil.create( fromClause.getASTFactory(),
+				implied ? IMPLIED_FROM : FROM_FRAGMENT,	// This causes the factory to instantiate the desired class.
+				text );
+		// Reset the node type, because the rest of the system is expecting FROM_FRAGMENT, all we wanted was
+		// for the factory to create the right sub-class.  This might get reset again later on anyway to make the
+		// SQL generation simpler.
+		ast.setType( FROM_FRAGMENT );
+		return ast;
+	}
+
+	private FromElement initializeJoin(
+			String path,
+			FromElement destination,
+			JoinSequence joinSequence,
+			String[] columns,
+			FromElement origin,
+			boolean manyToMany) {
+		destination.setType( JOIN_FRAGMENT );
+		destination.setJoinSequence( joinSequence );
+		destination.setColumns( columns );
+		destination.setOrigin( origin, manyToMany );
+		fromClause.addJoinByPathMap( path, destination );
+		return destination;
+	}
+
+	private String[] getColumns() {
+		if ( columns == null ) throw new IllegalStateException( "No foreign key columns were supplied!"
); + } + return columns; + } +} diff --git a/src/org/hibernate/hql/ast/tree/FromElementType.java b/src/org/hibernate/hql/ast/tree/FromElementType.java new file mode 100644 index 0000000000..371dd5c221 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/FromElementType.java @@ -0,0 +1,415 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import java.util.Map; + +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.util.ArrayHelper; +import org.hibernate.engine.JoinSequence; +import org.hibernate.hql.CollectionProperties; +import org.hibernate.hql.CollectionSubqueryFactory; +import org.hibernate.hql.NameGenerator; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.persister.collection.CollectionPropertyMapping; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.Joinable; +import org.hibernate.persister.entity.PropertyMapping; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Delegate that handles the type and join sequence information for a FromElement. + * + * @author josh Feb 12, 2005 10:17:34 AM + */ +class FromElementType { + private static final Log log = LogFactory.getLog( FromElementType.class ); + + private FromElement fromElement; + private EntityType entityType; + private EntityPersister persister; + private QueryableCollection queryableCollection; + private CollectionPropertyMapping collectionPropertyMapping; + private JoinSequence joinSequence; + private String collectionSuffix; + + public FromElementType(FromElement fromElement, EntityPersister persister, EntityType entityType) { + this.fromElement = fromElement; + this.persister = persister; + this.entityType = entityType; + if ( persister != null ) { + fromElement.setText( ( ( Queryable ) persister ).getTableName() + " " + getTableAlias() ); + } + } + + private String getTableAlias() { + return fromElement.getTableAlias(); + } + + private String getCollectionTableAlias() { + return fromElement.getCollectionTableAlias(); + } + + public String getCollectionSuffix() { + return collectionSuffix; + } + + public void setCollectionSuffix(String suffix) { + collectionSuffix = suffix; + } + + public EntityPersister getEntityPersister() { + return persister; + } + + public Type getDataType() { + if ( persister == null ) { + if ( queryableCollection == null ) { + return null; + } + return queryableCollection.getType(); + } + else { + return entityType; + } + } + + public Type getSelectType() { + if (entityType==null) return null; + boolean shallow = fromElement.getFromClause().getWalker().isShallowQuery(); + return TypeFactory.manyToOne( entityType.getAssociatedEntityName(), shallow ); + } + + /** + * Returns the Hibernate queryable implementation for the HQL class. + * + * @return the Hibernate queryable implementation for the HQL class. + */ + public Queryable getQueryable() { + return ( persister instanceof Queryable ) ? ( Queryable ) persister : null; + } + + /** + * Render the identifier select, but in a 'scalar' context (i.e. generate the column alias). + * + * @param i the sequence of the returned type + * @return the identifier select with the column alias. 
+	 */
+	String renderScalarIdentifierSelect(int i) {
+		checkInitialized();
+		String[] cols = getPropertyMapping( EntityPersister.ENTITY_ID ).toColumns( getTableAlias(), EntityPersister.ENTITY_ID );
+		StringBuffer buf = new StringBuffer();
+		// For property references generate <tablealias>.<columnname> as <columnalias>
+		for ( int j = 0; j < cols.length; j++ ) {
+			String column = cols[j];
+			if ( j > 0 ) {
+				buf.append( ", " );
+			}
+			buf.append( column ).append( " as " ).append( NameGenerator.scalarName( i, j ) );
+		}
+		return buf.toString();
+	}
+
+	/**
+	 * Returns the identifier select SQL fragment.
+	 *
+	 * @param size The total number of returned types.
+	 * @param k The sequence of the current returned type.
+	 * @return the identifier select SQL fragment.
+	 */
+	String renderIdentifierSelect(int size, int k) {
+		checkInitialized();
+		// Render the identifier select fragment using the table alias.
+		if ( fromElement.getFromClause().isSubQuery() ) {
+			// TODO: Replace this with a more elegant solution.
+			String[] idColumnNames = ( persister != null ) ?
+					( ( Queryable ) persister ).getIdentifierColumnNames() : new String[0];
+			StringBuffer buf = new StringBuffer();
+			for ( int i = 0; i < idColumnNames.length; i++ ) {
+				buf.append( fromElement.getTableAlias() ).append( '.' ).append( idColumnNames[i] );
+				if ( i != idColumnNames.length - 1 ) buf.append( ", " );
+			}
+			return buf.toString();
+		}
+		else {
+			if ( persister == null ) {
+				throw new QueryException( "not an entity" );
+			}
+			String fragment = ( ( Queryable ) persister ).identifierSelectFragment( getTableAlias(), getSuffix( size, k ) );
+			return trimLeadingCommaAndSpaces( fragment );
+		}
+	}
+
+	private String getSuffix(int size, int sequence) {
+		return generateSuffix( size, sequence );
+	}
+
+	private static String generateSuffix(int size, int k) {
+		String suffix = size == 1 ? "" : Integer.toString( k ) + '_';
+		return suffix;
+	}
+
+	private void checkInitialized() {
+		fromElement.checkInitialized();
+	}
+
+	/**
+	 * Returns the property select SQL fragment.
+	 *
+	 * @param size The total number of returned types.
+	 * @param k The sequence of the current returned type.
+	 * @return the property select SQL fragment.
+	 */
+	String renderPropertySelect(int size, int k, boolean allProperties) {
+		checkInitialized();
+		if ( persister == null ) {
+			return "";
+		}
+		else {
+			String fragment = ( ( Queryable ) persister ).propertySelectFragment(
+					getTableAlias(),
+					getSuffix( size, k ),
+					allProperties
+			);
+			return trimLeadingCommaAndSpaces( fragment );
+		}
+	}
+
+	String renderCollectionSelectFragment(int size, int k) {
+		if ( queryableCollection == null ) {
+			return "";
+		}
+		else {
+			if ( collectionSuffix == null ) {
+				collectionSuffix = generateSuffix( size, k );
+			}
+			String fragment = queryableCollection.selectFragment( getCollectionTableAlias(), collectionSuffix );
+			return trimLeadingCommaAndSpaces( fragment );
+		}
+	}
+
+	public String renderValueCollectionSelectFragment(int size, int k) {
+		if ( queryableCollection == null ) {
+			return "";
+		}
+		else {
+			if ( collectionSuffix == null ) {
+				collectionSuffix = generateSuffix( size, k );
+			}
+			String fragment = queryableCollection.selectFragment( getTableAlias(), collectionSuffix );
+			return trimLeadingCommaAndSpaces( fragment );
+		}
+	}
+
+	/**
+	 * This accounts for a quirk in Queryable, where it sometimes generates ', ' in front of the
+	 * SQL fragment.  :-P
+	 *
+	 * @param fragment An SQL fragment.
+	 * @return The fragment, without the leading comma and spaces.
+	 */
+	private static String trimLeadingCommaAndSpaces(String fragment) {
+		if ( fragment.length() > 0 && fragment.charAt( 0 ) == ',' ) {
+			fragment = fragment.substring( 1 );
+		}
+		return fragment.trim();
+	}
+
+	public void setJoinSequence(JoinSequence joinSequence) {
+		this.joinSequence = joinSequence;
+	}
+
+	public JoinSequence getJoinSequence() {
+		if ( joinSequence != null ) {
+			return joinSequence;
+		}
+
+		// Class names in the FROM clause result in a JoinSequence (the old FromParser does this).
+		if ( persister instanceof Joinable ) {
+			Joinable joinable = ( Joinable ) persister;
+			return fromElement.getSessionFactoryHelper().createJoinSequence().setRoot( joinable, getTableAlias() );
+		}
+		else {
+			return null;	// TODO: Should this really return null?  If not, figure out something better to do here.
+		}
+	}
+
+	public void setQueryableCollection(QueryableCollection queryableCollection) {
+		if ( this.queryableCollection != null ) {
+			throw new IllegalStateException( "QueryableCollection is already defined for " + this + "!" );
+		}
+		this.queryableCollection = queryableCollection;
+		if ( !queryableCollection.isOneToMany() ) {
+			// For many-to-many joins, use the tablename from the queryable collection for the default text.
+			fromElement.setText( queryableCollection.getTableName() + " " + getTableAlias() );
+		}
+	}
+
+	public QueryableCollection getQueryableCollection() {
+		return queryableCollection;
+	}
+
+	/**
+	 * Returns the type of a property, given its name (the last part) and the full path.
+	 *
+	 * @param propertyName The last part of the full path to the property.
+	 * @param propertyPath The full property path.
+	 * @return The type.
+	 */
+	public Type getPropertyType(String propertyName, String propertyPath) {
+		checkInitialized();
+		Type type = null;
+		// If this is an entity and the property is the identifier property, then use getIdentifierType().
+		// Note that the propertyName.equals( propertyPath ) check detects a component key reference,
+		// where the component class property name is the same as the entity id property name; if the
+		// two are not equal, we have such a reference and need to "fall through" to the property mapping.
+		if ( persister != null && propertyName.equals( propertyPath ) && propertyName.equals( persister.getIdentifierPropertyName() ) ) {
+			type = persister.getIdentifierType();
+		}
+		else {	// Otherwise, use the property mapping.
+			PropertyMapping mapping = getPropertyMapping( propertyName );
+			type = mapping.toType( propertyPath );
+		}
+		if ( type == null ) {
+			throw new MappingException( "Property " + propertyName + " does not exist in " +
+					( ( queryableCollection == null ) ? "class" : "collection" ) + " " +
+					( ( queryableCollection == null ) ? fromElement.getClassName() : queryableCollection.getRole() ) );
+		}
+		return type;
+	}
+
+	String[] toColumns(String tableAlias, String path, boolean inSelect) {
+		return toColumns( tableAlias, path, inSelect, false );
+	}
+
+	String[] toColumns(String tableAlias, String path, boolean inSelect, boolean forceAlias) {
+		checkInitialized();
+		PropertyMapping propertyMapping = getPropertyMapping( path );
+		// If this from element is a collection and the path is a collection property (maxIndex, etc.) then
+		// generate a sub-query.
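+		// Illustrative example (mapping names assumed): a restriction such as
+		// "where maxindex(o.items) > 5" takes this branch and is rendered as a correlated
+		// subselect over the collection table, roughly
+		// "(select max(idx) from order_items where order_items.order_id = o.id)".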
+		if ( !inSelect && queryableCollection != null && CollectionProperties.isCollectionProperty( path ) ) {
+			Map enabledFilters = fromElement.getWalker().getEnabledFilters();
+			String subquery = CollectionSubqueryFactory.createCollectionSubquery(
+					joinSequence,
+					enabledFilters,
+					propertyMapping.toColumns( tableAlias, path )
+			);
+			if ( log.isDebugEnabled() ) {
+				log.debug( "toColumns(" + tableAlias + "," + path + ") : subquery = " + subquery );
+			}
+			return new String[]{"(" + subquery + ")"};
+		}
+		else {
+			if ( forceAlias ) {
+				return propertyMapping.toColumns( tableAlias, path );
+			}
+			else if ( fromElement.getWalker().getStatementType() == HqlSqlTokenTypes.SELECT ) {
+				return propertyMapping.toColumns( tableAlias, path );
+			}
+			else if ( fromElement.getWalker().getCurrentClauseType() == HqlSqlTokenTypes.SELECT ) {
+				return propertyMapping.toColumns( tableAlias, path );
+			}
+			else if ( fromElement.getWalker().isSubQuery() ) {
+				// for a subquery, the alias to use depends on a few things (we
+				// already know this is not an overall SELECT):
+				//      1) if this FROM_ELEMENT represents a correlation to the
+				//          outer-most query
+				//              A) if the outer query represents a multi-table
+				//                  persister, we need to use the given alias
+				//                  in anticipation of one of the multi-table
+				//                  executors being used (as this subquery will
+				//                  actually be used in the "id select" phase
+				//                  of that multi-table executor)
+				//              B) otherwise, we need to use the persister's
+				//                  table name as the column qualification
+				//      2) otherwise (not correlated), use the given alias
+				if ( isCorrelation() ) {
+					if ( isMultiTable() ) {
+						return propertyMapping.toColumns( tableAlias, path );
+					}
+					else {
+						return propertyMapping.toColumns( extractTableName(), path );
+					}
+				}
+				else {
+					return propertyMapping.toColumns( tableAlias, path );
+				}
+			}
+			else {
+				String[] columns = propertyMapping.toColumns( path );
+				log.trace( "Using non-qualified column reference [" + path + " -> (" + ArrayHelper.toString( columns ) + ")]" );
+				return columns;
+			}
+		}
+	}
+
+	private boolean isCorrelation() {
+		FromClause top = fromElement.getWalker().getFinalFromClause();
+		return fromElement.getFromClause() != fromElement.getWalker().getCurrentFromClause() &&
+				fromElement.getFromClause() == top;
+	}
+
+	private boolean isMultiTable() {
+		// should be safe to only ever expect EntityPersister references here
+		return fromElement.getQueryable() != null &&
+				fromElement.getQueryable().isMultiTable();
+	}
+
+	private String extractTableName() {
+		// should be safe to only ever expect EntityPersister references here
+		return fromElement.getQueryable().getTableName();
+	}
+
+	PropertyMapping getPropertyMapping(String propertyName) {
+		checkInitialized();
+		if ( queryableCollection == null ) {		// Not a collection?
+			return ( PropertyMapping ) persister;	// Return the entity property mapping.
+		}
+		// If the property is a special collection property name, return a CollectionPropertyMapping.
+		if ( CollectionProperties.isCollectionProperty( propertyName ) ) {
+			if ( collectionPropertyMapping == null ) {
+				collectionPropertyMapping = new CollectionPropertyMapping( queryableCollection );
+			}
+			return collectionPropertyMapping;
+		}
+		if ( queryableCollection.getElementType().isAnyType() ) {
+			// collection of <many-to-any/> mappings...
+			// used to circumvent the component-collection check below...
+			return queryableCollection;
+		}
+		if ( queryableCollection.getElementType().isComponentType() ) {
+			// Collection of components.
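+			// The identifier of the collection owner is still mapped by the owner's entity
+			// persister; e.g. (illustrative) an id reference through a component-valued
+			// collection resolves against the owner rather than the element mapping below.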
+ if ( propertyName.equals( EntityPersister.ENTITY_ID ) ) { + return ( PropertyMapping ) queryableCollection.getOwnerEntityPersister(); + } + } + return queryableCollection; + } + + public boolean isCollectionOfValuesOrComponents() { + if ( persister == null ) { + if ( queryableCollection == null ) { + return false; + } + else { + return !queryableCollection.getElementType().isEntityType(); + } + } + else { + return false; + } + } + + public boolean isEntity() { + return persister != null; + } +} diff --git a/src/org/hibernate/hql/ast/tree/FromReferenceNode.java b/src/org/hibernate/hql/ast/tree/FromReferenceNode.java new file mode 100644 index 0000000000..b304b1e812 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/FromReferenceNode.java @@ -0,0 +1,109 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import antlr.SemanticException; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Represents a reference to a FROM element, for example a class alias in a WHERE clause. + * + * @author josh Jul 21, 2004 7:02:04 AM + */ +public abstract class FromReferenceNode extends AbstractSelectExpression + implements ResolvableNode, DisplayableNode, InitializeableNode, PathNode { + + private static final Log log = LogFactory.getLog( FromReferenceNode.class ); + + private FromElement fromElement; + private boolean resolved = false; + public static final int ROOT_LEVEL = 0; + + public FromElement getFromElement() { + return fromElement; + } + + public void setFromElement(FromElement fromElement) { + this.fromElement = fromElement; + } + + /** + * Resolves the left hand side of the DOT. + * + * @throws SemanticException + */ + public void resolveFirstChild() throws SemanticException { + } + + public String getPath() { + return getOriginalText(); + } + + public boolean isResolved() { + return resolved; + } + + public void setResolved() { + this.resolved = true; + if ( log.isDebugEnabled() ) { + log.debug( "Resolved : " + this.getPath() + " -> " + this.getText() ); + } + } + + public String getDisplayText() { + StringBuffer buf = new StringBuffer(); + buf.append( "{" ).append( ( fromElement == null ) ? 
"no fromElement" : fromElement.getDisplayText() ); + buf.append( "}" ); + return buf.toString(); + } + + public void recursiveResolve(int level, boolean impliedAtRoot, String classAlias) throws SemanticException { + recursiveResolve( level, impliedAtRoot, classAlias, this ); + } + + public void recursiveResolve(int level, boolean impliedAtRoot, String classAlias, AST parent) throws SemanticException { + AST lhs = getFirstChild(); + int nextLevel = level + 1; + if ( lhs != null ) { + FromReferenceNode n = ( FromReferenceNode ) lhs; + n.recursiveResolve( nextLevel, impliedAtRoot, null, this ); + } + resolveFirstChild(); + boolean impliedJoin = true; + if ( level == ROOT_LEVEL && !impliedAtRoot ) { + impliedJoin = false; + } + resolve( true, impliedJoin, classAlias, parent ); + } + + public boolean isReturnableEntity() throws SemanticException { + return !isScalar() && fromElement.isEntity(); + } + + public void resolveInFunctionCall(boolean generateJoin, boolean implicitJoin) throws SemanticException { + resolve( generateJoin, implicitJoin ); + } + + public void resolve(boolean generateJoin, boolean implicitJoin) throws SemanticException { + resolve( generateJoin, implicitJoin, null ); + } + + public void resolve(boolean generateJoin, boolean implicitJoin, String classAlias) throws SemanticException { + resolve( generateJoin, implicitJoin, classAlias, null ); + } + + public void prepareForDot(String propertyName) throws SemanticException { + } + + /** + * Sub-classes can override this method if they produce implied joins (e.g. DotNode). + * + * @return an implied join created by this from reference. + */ + public FromElement getImpliedJoin() { + return null; + } + +} diff --git a/src/org/hibernate/hql/ast/tree/HqlSqlWalkerNode.java b/src/org/hibernate/hql/ast/tree/HqlSqlWalkerNode.java new file mode 100644 index 0000000000..d0073c24f0 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/HqlSqlWalkerNode.java @@ -0,0 +1,40 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.ast.HqlSqlWalker; +import org.hibernate.hql.ast.util.AliasGenerator; +import org.hibernate.hql.ast.util.SessionFactoryHelper; + +import antlr.ASTFactory; + +/** + * A semantic analysis node, that points back to the main analyzer. + * + * @author josh Sep 24, 2004 4:08:13 PM + */ +public class HqlSqlWalkerNode extends SqlNode implements InitializeableNode { + /** + * A pointer back to the phase 2 processor. 
+ */ + private HqlSqlWalker walker; + + public void initialize(Object param) { + walker = ( HqlSqlWalker ) param; + } + + public HqlSqlWalker getWalker() { + return walker; + } + + public SessionFactoryHelper getSessionFactoryHelper() { + return walker.getSessionFactoryHelper(); + } + + public ASTFactory getASTFactory() { + return walker.getASTFactory(); + } + + public AliasGenerator getAliasGenerator() { + return walker.getAliasGenerator(); + } +} diff --git a/src/org/hibernate/hql/ast/tree/IdentNode.java b/src/org/hibernate/hql/ast/tree/IdentNode.java new file mode 100644 index 0000000000..b370875f4e --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/IdentNode.java @@ -0,0 +1,307 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import antlr.SemanticException; +import antlr.collections.AST; +import org.hibernate.QueryException; +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.JoinFragment; +import org.hibernate.type.CollectionType; +import org.hibernate.type.Type; +import org.hibernate.util.StringHelper; + +import java.util.List; + +/** + * Represents an identifier all by itself, which may be a function name, + * a class alias, or a form of naked property-ref depending on the + * context. + * + * @author josh Aug 16, 2004 7:20:55 AM + */ +public class IdentNode extends FromReferenceNode implements SelectExpression { + + private static final int UNKNOWN = 0; + private static final int PROPERTY_REF = 1; + private static final int COMPONENT_REF = 2; + + private boolean nakedPropertyRef = false; + + public void resolveIndex(AST parent) throws SemanticException { + // An ident node can represent an index expression if the ident + // represents a naked property ref + // *Note: this makes the assumption (which is currently the case + // in the hql-sql grammar) that the ident is first resolved + // itself (addrExpr -> resolve()). The other option, if that + // changes, is to call resolve from here; but it is + // currently un-needed overhead. + if (!(isResolved() && nakedPropertyRef)) { + throw new UnsupportedOperationException(); + } + + String propertyName = getOriginalText(); + if (!getDataType().isCollectionType()) { + throw new SemanticException("Collection expected; [" + propertyName + "] does not refer to a collection property"); + } + + // TODO : most of below was taken verbatim from DotNode; should either delegate this logic or super-type it + CollectionType type = (CollectionType) getDataType(); + String role = type.getRole(); + QueryableCollection queryableCollection = getSessionFactoryHelper().requireQueryableCollection(role); + + String alias = null; // DotNode uses null here... + String columnTableAlias = getFromElement().getTableAlias(); + int joinType = JoinFragment.INNER_JOIN; + boolean fetch = false; + + FromElementFactory factory = new FromElementFactory( + getWalker().getCurrentFromClause(), + getFromElement(), + propertyName, + alias, + getFromElement().toColumns(columnTableAlias, propertyName, false), + true + ); + FromElement elem = factory.createCollection(queryableCollection, role, joinType, fetch, true); + setFromElement(elem); + getWalker().addQuerySpaces(queryableCollection.getCollectionSpaces()); // Always add the collection's query spaces. 
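+			// Illustrative: a naked reference such as "items[0]" (where "items" is an unaliased
+			// property of the sole FROM element) has now been turned into a collection join,
+			// mirroring the DotNode logic noted above.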
+ } + + public void resolve(boolean generateJoin, boolean implicitJoin, String classAlias, AST parent) { + if (!isResolved()) { + if (getWalker().getCurrentFromClause().isFromElementAlias(getText())) { + if (resolveAsAlias()) { + setResolved(); + // We represent a from-clause alias + } + } + else if (parent != null && parent.getType() == SqlTokenTypes.DOT) { + DotNode dot = (DotNode) parent; + if (parent.getFirstChild() == this) { + if (resolveAsNakedComponentPropertyRefLHS(dot)) { + // we are the LHS of the DOT representing a naked comp-prop-ref + setResolved(); + } + } + else { + if (resolveAsNakedComponentPropertyRefRHS(dot)) { + // we are the RHS of the DOT representing a naked comp-prop-ref + setResolved(); + } + } + } + else { + int result = resolveAsNakedPropertyRef(); + if (result == PROPERTY_REF) { + // we represent a naked (simple) prop-ref + setResolved(); + } + else if (result == COMPONENT_REF) { + // EARLY EXIT!!! return so the resolve call explicitly coming from DotNode can + // resolve this... + return; + } + } + + // if we are still not resolved, we might represent a constant. + // needed to add this here because the allowance of + // naked-prop-refs in the grammar collides with the + // definition of literals/constants ("nondeterminism"). + // TODO: cleanup the grammar so that "processConstants" is always just handled from here + if (!isResolved()) { + try { + getWalker().getLiteralProcessor().processConstant(this, false); + } + catch (Throwable ignore) { + // just ignore it for now, it'll get resolved later... + } + } + } + } + + private boolean resolveAsAlias() { + // This is not actually a constant, but a reference to FROM element. + FromElement element = getWalker().getCurrentFromClause().getFromElement(getText()); + if (element != null) { + setFromElement(element); + setText(element.getIdentityColumn()); + setType(SqlTokenTypes.ALIAS_REF); + return true; + } + return false; + } + + private Type getNakedPropertyType(FromElement fromElement) + { + if (fromElement == null) { + return null; + } + String property = getOriginalText(); + Type propertyType = null; + try { + propertyType = fromElement.getPropertyType(property, property); + } + catch (Throwable t) { + } + return propertyType; + } + + private int resolveAsNakedPropertyRef() { + FromElement fromElement = locateSingleFromElement(); + if (fromElement == null) { + return UNKNOWN; + } + Queryable persister = fromElement.getQueryable(); + if (persister == null) { + return UNKNOWN; + } + Type propertyType = getNakedPropertyType(fromElement); + if (propertyType == null) { + // assume this ident's text does *not* refer to a property on the given persister + return UNKNOWN; + } + + if ((propertyType.isComponentType() || propertyType.isAssociationType() )) { + return COMPONENT_REF; + } + + setFromElement(fromElement); + String property = getText(); + String[] columns = getWalker().isSelectStatement() + ? persister.toColumns(fromElement.getTableAlias(), property) + : persister.toColumns(property); + String text = StringHelper.join(", ", columns); + setText(columns.length == 1 ? 
text : "(" + text + ")"); + setType(SqlTokenTypes.SQL_TOKEN); + + // these pieces are needed for usage in select clause + super.setDataType(propertyType); + nakedPropertyRef = true; + + return PROPERTY_REF; + } + + private boolean resolveAsNakedComponentPropertyRefLHS(DotNode parent) { + FromElement fromElement = locateSingleFromElement(); + if (fromElement == null) { + return false; + } + + Type componentType = getNakedPropertyType(fromElement); + if ( componentType == null ) { + throw new QueryException( "Unable to resolve path [" + parent.getPath() + "], unexpected token [" + getOriginalText() + "]" ); + } + if (!componentType.isComponentType()) { + throw new QueryException("Property '" + getOriginalText() + "' is not a component. Use an alias to reference associations or collections."); + } + + Type propertyType = null; // used to set the type of the parent dot node + String propertyPath = getText() + "." + getNextSibling().getText(); + try { + // check to see if our "propPath" actually + // represents a property on the persister + propertyType = fromElement.getPropertyType(getText(), propertyPath); + } + catch (Throwable t) { + // assume we do *not* refer to a property on the given persister + return false; + } + + setFromElement(fromElement); + parent.setPropertyPath(propertyPath); + parent.setDataType(propertyType); + + return true; + } + + private boolean resolveAsNakedComponentPropertyRefRHS(DotNode parent) { + FromElement fromElement = locateSingleFromElement(); + if (fromElement == null) { + return false; + } + + Type propertyType = null; + String propertyPath = parent.getLhs().getText() + "." + getText(); + try { + // check to see if our "propPath" actually + // represents a property on the persister + propertyType = fromElement.getPropertyType(getText(), propertyPath); + } + catch (Throwable t) { + // assume we do *not* refer to a property on the given persister + return false; + } + + setFromElement(fromElement); + // this piece is needed for usage in select clause + super.setDataType(propertyType); + nakedPropertyRef = true; + + return true; + } + + private FromElement locateSingleFromElement() { + List fromElements = getWalker().getCurrentFromClause().getFromElements(); + if (fromElements == null || fromElements.size() != 1) { + // TODO : should this be an error? + return null; + } + FromElement element = (FromElement) fromElements.get(0); + if (element.getClassAlias() != null) { + // naked property-refs cannot be used with an aliased from element + return null; + } + return element; + } + + public Type getDataType() { + Type type = super.getDataType(); + if (type != null) return type; + FromElement fe = getFromElement(); + if (fe != null) return fe.getDataType(); + SQLFunction sf = getWalker().getSessionFactoryHelper().findSQLFunction(getText()); + return sf == null ? 
null : sf.getReturnType(null, null);
+	}
+
+	public void setScalarColumnText(int i) throws SemanticException {
+		if (nakedPropertyRef) {
+			// do *not* over-write the column text, as that has already been
+			// "rendered" during resolve
+			ColumnHelper.generateSingleScalarColumn(this, i);
+		}
+		else {
+			FromElement fe = getFromElement();
+			if (fe != null) {
+				setText(fe.renderScalarIdentifierSelect(i));
+			}
+			else {
+				ColumnHelper.generateSingleScalarColumn(this, i);
+			}
+		}
+	}
+
+	public String getDisplayText() {
+		StringBuffer buf = new StringBuffer();
+
+		if (getType() == SqlTokenTypes.ALIAS_REF) {
+			buf.append("{alias=").append(getOriginalText());
+			if (getFromElement() == null) {
+				buf.append(", no from element");
+			}
+			else {
+				buf.append(", className=").append(getFromElement().getClassName());
+				buf.append(", tableAlias=").append(getFromElement().getTableAlias());
+			}
+			buf.append("}");
+		}
+		else {
+			buf.append("{originalText=" + getOriginalText()).append("}");
+		}
+		return buf.toString();
+	}
+
+}
diff --git a/src/org/hibernate/hql/ast/tree/ImpliedFromElement.java b/src/org/hibernate/hql/ast/tree/ImpliedFromElement.java
new file mode 100644
index 0000000000..3b1e9ea8d0
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/ImpliedFromElement.java
@@ -0,0 +1,57 @@
+// $Id$
+package org.hibernate.hql.ast.tree;
+
+/**
+ * Represents a FROM element implied by a path expression or a collection reference.
+ *
+ * @author josh Feb 10, 2005 12:31:03 AM
+ */
+public class ImpliedFromElement extends FromElement {
+	/**
+	 * True if this from element was implied from a path in the FROM clause, but not
+	 * explicitly declared in the from clause.
+	 */
+	private boolean impliedInFromClause = false;
+
+	/**
+	 * True if this implied from element should be included in the projection list.
+	 */
+	private boolean inProjectionList = false;
+
+	public boolean isImplied() {
+		return true;
+	}
+
+	public void setImpliedInFromClause(boolean flag) {
+		impliedInFromClause = flag;
+	}
+
+	public boolean isImpliedInFromClause() {
+		return impliedInFromClause;
+	}
+
+	public void setInProjectionList(boolean inProjectionList) {
+		this.inProjectionList = inProjectionList;
+	}
+
+	public boolean inProjectionList() {
+		return inProjectionList && isFromOrJoinFragment();
+	}
+
+	public boolean isIncludeSubclasses() {
+		return false;	// Never include subclasses for implied from elements.
+	}
+
+	/**
+	 * Returns additional display text for the AST node.
+	 *
+	 * @return String - The additional display text.
+	 */
+	public String getDisplayText() {
+		StringBuffer buf = new StringBuffer();
+		buf.append( "ImpliedFromElement{" );
+		appendDisplayText( buf );
+		buf.append( "}" );
+		return buf.toString();
+	}
+}
diff --git a/src/org/hibernate/hql/ast/tree/InLogicOperatorNode.java b/src/org/hibernate/hql/ast/tree/InLogicOperatorNode.java
new file mode 100644
index 0000000000..9ba2cb2e67
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/InLogicOperatorNode.java
@@ -0,0 +1,40 @@
+package org.hibernate.hql.ast.tree;
+
+import antlr.SemanticException;
+import antlr.collections.AST;
+import org.hibernate.type.Type;
+
+/**
+ * @author Steve Ebersole
+ */
+public class InLogicOperatorNode extends BinaryLogicOperatorNode implements BinaryOperatorNode {
+
+	public Node getInList() {
+		return getRightHandOperand();
+	}
+
+	public void initialize() throws SemanticException {
+		Node lhs = getLeftHandOperand();
+		if ( lhs == null ) {
+			throw new SemanticException( "left-hand operand of in operator was null" );
+		}
+		Node inList = getInList();
+		if ( inList == null ) {
+			throw new SemanticException( "right-hand operand of in operator was null" );
+		}
+
+		// for expected parameter type injection, we expect that the lhs represents
+		// some form of property ref and that the children of the in-list represent
+		// one-or-more params.
+		if ( SqlNode.class.isAssignableFrom( lhs.getClass() ) ) {
+			Type lhsType = ( ( SqlNode ) lhs ).getDataType();
+			AST inListChild = inList.getFirstChild();
+			while ( inListChild != null ) {
+				if ( ExpectedTypeAwareNode.class.isAssignableFrom( inListChild.getClass() ) ) {
+					( ( ExpectedTypeAwareNode ) inListChild ).setExpectedType( lhsType );
+				}
+				inListChild = inListChild.getNextSibling();
+			}
+		}
+	}
+}
diff --git a/src/org/hibernate/hql/ast/tree/IndexNode.java b/src/org/hibernate/hql/ast/tree/IndexNode.java
new file mode 100644
index 0000000000..4507b69f68
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/IndexNode.java
@@ -0,0 +1,134 @@
+// $Id$
+package org.hibernate.hql.ast.tree;
+
+import org.hibernate.QueryException;
+import org.hibernate.engine.JoinSequence;
+import org.hibernate.hql.ast.SqlGenerator;
+import org.hibernate.hql.ast.util.SessionFactoryHelper;
+import org.hibernate.persister.collection.QueryableCollection;
+import org.hibernate.type.CollectionType;
+import org.hibernate.type.Type;
+
+import antlr.RecognitionException;
+import antlr.SemanticException;
+import antlr.collections.AST;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Represents the [] operator and provides its semantics.
+ *
+ * @author josh Aug 14, 2004 7:07:10 AM
+ */
+public class IndexNode extends FromReferenceNode {
+
+	private static final Log log = LogFactory.getLog( IndexNode.class );
+
+	public void setScalarColumnText(int i) throws SemanticException {
+		throw new UnsupportedOperationException( "An IndexNode cannot generate column text!" );
+	}
+
+	public void prepareForDot(String propertyName) throws SemanticException {
+		FromElement fromElement = getFromElement();
+		if ( fromElement == null ) {
+			throw new IllegalStateException( "No FROM element for index operator!" );
+		}
+		QueryableCollection queryableCollection = fromElement.getQueryableCollection();
+		if ( queryableCollection != null && !queryableCollection.isOneToMany() ) {
+
+			FromReferenceNode collectionNode = ( FromReferenceNode ) getFirstChild();
+			String path = collectionNode.getPath() + "[]."
+ propertyName; + if ( log.isDebugEnabled() ) { + log.debug( "Creating join for many-to-many elements for " + path ); + } + FromElementFactory factory = new FromElementFactory( fromElement.getFromClause(), fromElement, path ); + // This will add the new from element to the origin. + FromElement elementJoin = factory.createElementJoin( queryableCollection ); + setFromElement( elementJoin ); + } + } + + public void resolveIndex(AST parent) throws SemanticException { + throw new UnsupportedOperationException(); + } + + public void resolve(boolean generateJoin, boolean implicitJoin, String classAlias, AST parent) + throws SemanticException { + if ( isResolved() ) { + return; + } + FromReferenceNode collectionNode = ( FromReferenceNode ) getFirstChild(); + SessionFactoryHelper sessionFactoryHelper = getSessionFactoryHelper(); + collectionNode.resolveIndex( this ); // Fully resolve the map reference, create implicit joins. + + Type type = collectionNode.getDataType(); + if ( !type.isCollectionType() ) { + throw new SemanticException( "The [] operator cannot be applied to type " + type.toString() ); + } + String collectionRole = ( ( CollectionType ) type ).getRole(); + QueryableCollection queryableCollection = sessionFactoryHelper.requireQueryableCollection( collectionRole ); + if ( !queryableCollection.hasIndex() ) { + throw new QueryException( "unindexed fromElement before []: " + collectionNode.getPath() ); + } + + // Generate the inner join -- The elements need to be joined to the collection they are in. + FromElement fromElement = collectionNode.getFromElement(); + String elementTable = fromElement.getTableAlias(); + FromClause fromClause = fromElement.getFromClause(); + String path = collectionNode.getPath(); + + FromElement elem = fromClause.findCollectionJoin( path ); + if ( elem == null ) { + FromElementFactory factory = new FromElementFactory( fromClause, fromElement, path ); + elem = factory.createCollectionElementsJoin( queryableCollection, elementTable ); + if ( log.isDebugEnabled() ) { + log.debug( "No FROM element found for the elements of collection join path " + path + + ", created " + elem ); + } + } + else { + if ( log.isDebugEnabled() ) { + log.debug( "FROM element found for collection join path " + path ); + } + } + + // Add the condition to the join sequence that qualifies the indexed element. + AST index = collectionNode.getNextSibling(); // The index should be a constant, which will have been processed already. + if ( index == null ) { + throw new QueryException( "No index value!" ); + } + + setFromElement( fromElement ); // The 'from element' that represents the elements of the collection. + + // Sometimes use the element table alias, sometimes use the... umm... collection table alias (many to many) + String collectionTableAlias = elementTable; + if ( elem.getCollectionTableAlias() != null ) { + collectionTableAlias = elem.getCollectionTableAlias(); + } + + // TODO: get SQL rendering out of here, create an AST for the join expressions. + // Use the SQL generator grammar to generate the SQL text for the index expression. + JoinSequence joinSequence = fromElement.getJoinSequence(); + String[] indexCols = queryableCollection.getIndexColumnNames(); + if ( indexCols.length != 1 ) { + throw new QueryException( "composite-index appears in []: " + collectionNode.getPath() ); + } + SqlGenerator gen = new SqlGenerator( getSessionFactoryHelper().getFactory() ); + try { + gen.simpleExpr( index ); //TODO: used to be exprNoParens! was this needed? 
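+			// Illustrative: for HQL like "order.items[2]", the rendered expression is the literal
+			// "2"; it is attached below as a join condition on the collection's index column.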
+		}
+		catch ( RecognitionException e ) {
+			throw new QueryException( e.getMessage(), e );
+		}
+		String expression = gen.getSQL();
+		joinSequence.addCondition( collectionTableAlias + '.' + indexCols[0] + " = " + expression );
+
+		// Now, set the text for this node.  It should be the element columns.
+		String[] elementColumns = queryableCollection.getElementColumnNames( elementTable );
+		setText( elementColumns[0] );
+		setResolved();
+	}
+
+
+}
diff --git a/src/org/hibernate/hql/ast/tree/InitializeableNode.java b/src/org/hibernate/hql/ast/tree/InitializeableNode.java
new file mode 100644
index 0000000000..36b3ee3a1d
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/InitializeableNode.java
@@ -0,0 +1,15 @@
+// $Id$
+
+package org.hibernate.hql.ast.tree;
+
+/**
+ * An interface for initializeable AST nodes.
+ */
+public interface InitializeableNode {
+	/**
+	 * Initializes the node with the parameter.
+	 *
+	 * @param param the initialization parameter.
+	 */
+	void initialize(Object param);
+}
diff --git a/src/org/hibernate/hql/ast/tree/InsertStatement.java b/src/org/hibernate/hql/ast/tree/InsertStatement.java
new file mode 100644
index 0000000000..a05a5a40b8
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/InsertStatement.java
@@ -0,0 +1,55 @@
+// $Id$
+package org.hibernate.hql.ast.tree;
+
+import org.hibernate.QueryException;
+import org.hibernate.hql.antlr.HqlSqlTokenTypes;
+
+/**
+ * Defines a top-level AST node representing an HQL "insert select" statement.
+ *
+ * @author Steve Ebersole
+ */
+public class InsertStatement extends AbstractStatement {
+
+	/**
+	 * @see Statement#getStatementType()
+	 */
+	public int getStatementType() {
+		return HqlSqlTokenTypes.INSERT;
+	}
+
+	/**
+	 * @see Statement#needsExecutor()
+	 */
+	public boolean needsExecutor() {
+		return true;
+	}
+
+	/**
+	 * Performs detailed semantic validation on this insert statement tree.
+	 *
+	 * @throws QueryException Indicates validation failure.
+	 */
+	public void validate() throws QueryException {
+		getIntoClause().validateTypes( getSelectClause() );
+	}
+
+	/**
+	 * Retrieve this insert statement's into-clause.
+	 *
+	 * @return The into-clause.
+	 */
+	public IntoClause getIntoClause() {
+		return ( IntoClause ) getFirstChild();
+	}
+
+	/**
+	 * Retrieve this insert statement's select-clause.
+	 *
+	 * @return The select-clause.
+	 */
+	public SelectClause getSelectClause() {
+		return ( ( QueryNode ) getIntoClause().getNextSibling() ).getSelectClause();
+	}
+
+}
diff --git a/src/org/hibernate/hql/ast/tree/IntoClause.java b/src/org/hibernate/hql/ast/tree/IntoClause.java
new file mode 100644
index 0000000000..b6f1bd4506
--- /dev/null
+++ b/src/org/hibernate/hql/ast/tree/IntoClause.java
@@ -0,0 +1,234 @@
+// $Id$
+package org.hibernate.hql.ast.tree;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.sql.Types;
+
+import org.hibernate.QueryException;
+import org.hibernate.persister.entity.Queryable;
+import org.hibernate.type.Type;
+import org.hibernate.util.ArrayHelper;
+
+import antlr.collections.AST;
+
+/**
+ * Represents an entity referenced in the INTO clause of an HQL
+ * INSERT statement.
+ * + * @author Steve Ebersole + */ +public class IntoClause extends HqlSqlWalkerNode implements DisplayableNode { + + private Queryable persister; + private String columnSpec = ""; + private Type[] types; + + private boolean discriminated; + private boolean explicitIdInsertion; + private boolean explicitVersionInsertion; + + + public void initialize(Queryable persister) { + if ( persister.isAbstract() ) { + throw new QueryException( "cannot insert into abstract class (no table)" ); + } + this.persister = persister; + initializeColumns(); + + if ( getWalker().getSessionFactoryHelper().hasPhysicalDiscriminatorColumn( persister ) ) { + discriminated = true; + columnSpec += ", " + persister.getDiscriminatorColumnName(); + } + + resetText(); + } + + private void resetText() { + setText( "into " + getTableName() + " ( " + columnSpec + " )" ); + } + + public String getTableName() { + return persister.getSubclassTableName( 0 ); + } + + public Queryable getQueryable() { + return persister; + } + + public String getEntityName() { + return persister.getEntityName(); + } + + public Type[] getInsertionTypes() { + return types; + } + + public boolean isDiscriminated() { + return discriminated; + } + + public boolean isExplicitIdInsertion() { + return explicitIdInsertion; + } + + public boolean isExplicitVersionInsertion() { + return explicitVersionInsertion; + } + + public void prependIdColumnSpec() { + columnSpec = persister.getIdentifierColumnNames()[0] + ", " + columnSpec; + resetText(); + } + + public void prependVersionColumnSpec() { + columnSpec = persister.getPropertyColumnNames( persister.getVersionProperty() )[0] + ", " + columnSpec; + resetText(); + } + + public void validateTypes(SelectClause selectClause) throws QueryException { + Type[] selectTypes = selectClause.getQueryReturnTypes(); + if ( selectTypes.length != types.length ) { + throw new QueryException( "number of select types did not match those for insert" ); + } + + for ( int i = 0; i < types.length; i++ ) { + if ( !areCompatible( types[i], selectTypes[i] ) ) { + throw new QueryException( + "insertion type [" + types[i] + "] and selection type [" + + selectTypes[i] + "] at position " + i + " are not compatible" + ); + } + } + + // otherwise, everything ok. + } + + /** + * Returns additional display text for the AST node. + * + * @return String - The additional display text. + */ + public String getDisplayText() { + StringBuffer buf = new StringBuffer(); + buf.append( "IntoClause{" ); + buf.append( "entityName=" ).append( getEntityName() ); + buf.append( ",tableName=" ).append( getTableName() ); + buf.append( ",columns={" ).append( columnSpec ).append( "}" ); + buf.append( "}" ); + return buf.toString(); + } + + private void initializeColumns() { + AST propertySpec = getFirstChild(); + List types = new ArrayList(); + visitPropertySpecNodes( propertySpec.getFirstChild(), types ); + this.types = ArrayHelper.toTypeArray( types ); + columnSpec = columnSpec.substring( 0, columnSpec.length() - 2 ); + } + + private void visitPropertySpecNodes(AST propertyNode, List types) { + if ( propertyNode == null ) { + return; + } + // TODO : we really need to be able to deal with component paths here also; + // this is difficult because the hql-sql grammar expects all those node types + // to be FromReferenceNodes. One potential fix here would be to convert the + // IntoClause to just use a FromClause/FromElement combo (as a child of the + // InsertStatement) and move all this logic into the InsertStatement. 
That's
+	//      probably the easiest approach (read: least amount of changes to the grammar
+	//      and code), but just doesn't feel right as then an insert would contain
+	//      2 from-clauses
+		String name = propertyNode.getText();
+		if ( isSuperclassProperty( name ) ) {
+			throw new QueryException( "INSERT statements cannot refer to superclass/joined properties [" + name + "]" );
+		}
+
+		if ( name.equals( persister.getIdentifierPropertyName() ) ) {
+			explicitIdInsertion = true;
+		}
+
+		if ( persister.isVersioned() ) {
+			if ( name.equals( persister.getPropertyNames()[ persister.getVersionProperty() ] ) ) {
+				explicitVersionInsertion = true;
+			}
+		}
+
+		String[] columnNames = persister.toColumns( name );
+		renderColumns( columnNames );
+		types.add( persister.toType( name ) );
+
+		// visit breadth-first, then depth
+		visitPropertySpecNodes( propertyNode.getNextSibling(), types );
+		visitPropertySpecNodes( propertyNode.getFirstChild(), types );
+	}
+
+	private void renderColumns(String[] columnNames) {
+		for ( int i = 0; i < columnNames.length; i++ ) {
+			columnSpec += columnNames[i] + ", ";
+		}
+	}
+
+	private boolean isSuperclassProperty(String propertyName) {
+		// really there are two situations where it should be ok to allow the insertion
+		// into properties defined on a superclass:
+		//      1) union-subclass with an abstract root entity
+		//      2) discrim-subclass
+		//
+		// #1 is handled already because of the fact that
+		// UnionSubclassPersister already always returns 0
+		// for this call...
+		//
+		// we may want to disallow it for discrim-subclass just for
+		// consistency-sake (currently does not work anyway)...
+		return persister.getSubclassPropertyTableNumber( propertyName ) != 0;
+	}
+
+	/**
+	 * Determine whether the two types are "assignment compatible".
+	 *
+	 * @param target The type defined in the into-clause.
+	 * @param source The type defined in the select clause.
+	 * @return True if they are assignment compatible.
+	 */
+	private boolean areCompatible(Type target, Type source) {
+		if ( target.equals( source ) ) {
+			// if the types report logical equivalence, return true...
+			return true;
+		}
+
+		// otherwise, perform a "deep equivalence" check...
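+		// e.g. (illustrative): a TIMESTAMP target column may accept a DATE or TIME source,
+		// and the target class must be assignable from the source class; see the checks below.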
+ + if ( !target.getReturnedClass().isAssignableFrom( source.getReturnedClass() ) ) { + return false; + } + + int[] targetDatatypes = target.sqlTypes( getSessionFactoryHelper().getFactory() ); + int[] sourceDatatypes = source.sqlTypes( getSessionFactoryHelper().getFactory() ); + + if ( targetDatatypes.length != sourceDatatypes.length ) { + return false; + } + + for ( int i = 0; i < targetDatatypes.length; i++ ) { + if ( !areSqlTypesCompatible( targetDatatypes[i], sourceDatatypes[i] ) ) { + return false; + } + } + + return true; + } + + private boolean areSqlTypesCompatible(int target, int source) { + switch ( target ) { + case Types.TIMESTAMP: + return source == Types.DATE || source == Types.TIME || source == Types.TIMESTAMP; + case Types.DATE: + return source == Types.DATE || source == Types.TIMESTAMP; + case Types.TIME: + return source == Types.TIME || source == Types.TIMESTAMP; + default: + return target == source; + } + } +} diff --git a/src/org/hibernate/hql/ast/tree/JavaConstantNode.java b/src/org/hibernate/hql/ast/tree/JavaConstantNode.java new file mode 100644 index 0000000000..e024bc5065 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/JavaConstantNode.java @@ -0,0 +1,66 @@ +package org.hibernate.hql.ast.tree; + +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; +import org.hibernate.type.LiteralType; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.dialect.Dialect; +import org.hibernate.QueryException; +import org.hibernate.hql.QueryTranslator; + +/** + * A node representing a static Java constant. + * + * @author Steve Ebersole + */ +public class JavaConstantNode extends Node implements ExpectedTypeAwareNode, SessionFactoryAwareNode { + + private SessionFactoryImplementor factory; + + private String constantExpression; + private Object constantValue; + private Type heuristicType; + + private Type expectedType; + + public void setText(String s) { + // for some reason the antlr.CommonAST initialization routines force + // this method to get called twice. The first time with an empty string + if ( StringHelper.isNotEmpty( s ) ) { + constantExpression = s; + constantValue = ReflectHelper.getConstantValue( s ); + heuristicType = TypeFactory.heuristicType( constantValue.getClass().getName() ); + super.setText( s ); + } + } + + public void setExpectedType(Type expectedType) { + this.expectedType = expectedType; + } + + public Type getExpectedType() { + return expectedType; + } + + public void setSessionFactory(SessionFactoryImplementor factory) { + this.factory = factory; + } + + private String resolveToLiteralString(Type type) { + try { + LiteralType literalType = ( LiteralType ) type; + Dialect dialect = factory.getDialect(); + return literalType.objectToSQLString( constantValue, dialect ); + } + catch ( Throwable t ) { + throw new QueryException( QueryTranslator.ERROR_CANNOT_FORMAT_LITERAL + constantExpression, t ); + } + } + + public String getRenderText(SessionFactoryImplementor sessionFactory) { + Type type = expectedType == null ? 
heuristicType : expectedType; + return resolveToLiteralString( type ); + } +} diff --git a/src/org/hibernate/hql/ast/tree/LiteralNode.java b/src/org/hibernate/hql/ast/tree/LiteralNode.java new file mode 100644 index 0000000000..80e08959c9 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/LiteralNode.java @@ -0,0 +1,41 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.Hibernate; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.type.Type; + +import antlr.SemanticException; + +/** + * Represents a literal. + * + * @author josh Jan 8, 2005 10:09:53 AM + */ +public class LiteralNode extends AbstractSelectExpression implements HqlSqlTokenTypes { + + public void setScalarColumnText(int i) throws SemanticException { + ColumnHelper.generateSingleScalarColumn( this, i ); + } + + public Type getDataType() { + switch ( getType() ) { + case NUM_INT: + return Hibernate.INTEGER; + case NUM_FLOAT: + return Hibernate.FLOAT; + case NUM_LONG: + return Hibernate.LONG; + case NUM_DOUBLE: + return Hibernate.DOUBLE; + case QUOTED_STRING: + return Hibernate.STRING; + case TRUE: + case FALSE: + return Hibernate.BOOLEAN; + default: + return null; + } + } +} diff --git a/src/org/hibernate/hql/ast/tree/MethodNode.java b/src/org/hibernate/hql/ast/tree/MethodNode.java new file mode 100644 index 0000000000..a513ede74b --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/MethodNode.java @@ -0,0 +1,190 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import java.util.Arrays; + +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.hql.CollectionProperties; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.persister.collection.CollectionPropertyNames; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.type.Type; + +import antlr.SemanticException; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Represents a method call. + * + * @author josh Aug 16, 2004 7:59:42 AM + */ +public class MethodNode extends AbstractSelectExpression implements SelectExpression { + + private static final Log log = LogFactory.getLog( MethodNode.class ); + + private String methodName; + private FromElement fromElement; + private String[] selectColumns; + private SQLFunction function; + private boolean inSelect; + + public void resolve(boolean inSelect) throws SemanticException { + // Get the function name node. + AST name = getFirstChild(); + initializeMethodNode( name, inSelect ); + AST exprList = name.getNextSibling(); + // If the expression list has exactly one expression, and the type of the expression is a collection + // then this might be a collection function, such as index(c) or size(c). + if ( ASTUtil.hasExactlyOneChild( exprList ) && isCollectionPropertyMethod() ) { + collectionProperty( exprList.getFirstChild(), name ); + } + else { + dialectFunction( exprList ); + } + } + + public SQLFunction getSQLFunction() { + return function; + } + + private void dialectFunction(AST exprList) { + function = getSessionFactoryHelper().findSQLFunction( methodName ); + if ( function != null ) { + AST firstChild = exprList != null ? 
exprList.getFirstChild() : null; + Type functionReturnType = getSessionFactoryHelper() + .findFunctionReturnType( methodName, firstChild ); + setDataType( functionReturnType ); + } + //TODO: + /*else { + methodName = (String) getWalker().getTokenReplacements().get( methodName ); + }*/ + } + + public boolean isCollectionPropertyMethod() { + return CollectionProperties.isAnyCollectionProperty( methodName ); + } + + public void initializeMethodNode(AST name, boolean inSelect) { + name.setType( SqlTokenTypes.METHOD_NAME ); + String text = name.getText(); + methodName = text.toLowerCase(); // Use the lower case function name. + this.inSelect = inSelect; // Remember whether we're in a SELECT clause or not. + } + + private String getMethodName() { + return methodName; + } + + private void collectionProperty(AST path, AST name) throws SemanticException { + if ( path == null ) { + throw new SemanticException( "Collection function " + name.getText() + " has no path!" ); + } + + SqlNode expr = ( SqlNode ) path; + Type type = expr.getDataType(); + if ( log.isDebugEnabled() ) { + log.debug( "collectionProperty() : name=" + name + " type=" + type ); + } + + resolveCollectionProperty( expr ); + } + + public boolean isScalar() throws SemanticException { + // Method expressions in a SELECT should always be considered scalar. + return true; + } + + public void resolveCollectionProperty(AST expr) throws SemanticException { + String propertyName = CollectionProperties.getNormalizedPropertyName( getMethodName() ); + if ( expr instanceof FromReferenceNode ) { + FromReferenceNode collectionNode = ( FromReferenceNode ) expr; + // If this is 'elements' then create a new FROM element. + if ( CollectionPropertyNames.COLLECTION_ELEMENTS.equals( propertyName ) ) { + handleElements( collectionNode, propertyName ); + } + else { + // Not elements(x) + fromElement = collectionNode.getFromElement(); + setDataType( fromElement.getPropertyType( propertyName, propertyName ) ); + selectColumns = fromElement.toColumns( fromElement.getTableAlias(), propertyName, inSelect ); + } + if ( collectionNode instanceof DotNode ) { + prepareAnyImplicitJoins( ( DotNode ) collectionNode ); + } + if ( !inSelect ) { + fromElement.setText( "" ); + fromElement.setUseWhereFragment( false ); + } + prepareSelectColumns( selectColumns ); + setText( selectColumns[0] ); + setType( SqlTokenTypes.SQL_TOKEN ); + } + else { + throw new SemanticException( + "Unexpected expression " + expr + + " found for collection function " + propertyName + ); + } + } + + private void prepareAnyImplicitJoins(DotNode dotNode) throws SemanticException { + if ( dotNode.getLhs() instanceof DotNode ) { + DotNode lhs = ( DotNode ) dotNode.getLhs(); + FromElement lhsOrigin = lhs.getFromElement(); + if ( lhsOrigin != null && "".equals( lhsOrigin.getText() ) ) { + String lhsOriginText = lhsOrigin.getQueryable().getTableName() + + " " + lhsOrigin.getTableAlias(); + lhsOrigin.setText( lhsOriginText ); + } + prepareAnyImplicitJoins( lhs ); + } + } + + private void handleElements(FromReferenceNode collectionNode, String propertyName) { + FromElement collectionFromElement = collectionNode.getFromElement(); + QueryableCollection queryableCollection = collectionFromElement.getQueryableCollection(); + + String path = collectionNode.getPath() + "[]." 
+ propertyName; + log.debug( "Creating elements for " + path ); + + fromElement = collectionFromElement; + if ( !collectionFromElement.isCollectionOfValuesOrComponents() ) { + getWalker().addQuerySpaces( queryableCollection.getElementPersister().getQuerySpaces() ); + } + + setDataType( queryableCollection.getElementType() ); + selectColumns = collectionFromElement.toColumns( fromElement.getTableAlias(), propertyName, inSelect ); + } + + public void setScalarColumnText(int i) throws SemanticException { + if ( selectColumns == null ) { // Dialect function + ColumnHelper.generateSingleScalarColumn( this, i ); + } + else { // Collection 'property function' + ColumnHelper.generateScalarColumns( this, selectColumns, i ); + } + } + + protected void prepareSelectColumns(String[] columns) { + return; + } + + public FromElement getFromElement() { + return fromElement; + } + + public String getDisplayText() { + return "{" + + "method=" + getMethodName() + + ",selectColumns=" + ( selectColumns == null ? + null : Arrays.asList( selectColumns ) ) + + ",fromElement=" + fromElement.getTableAlias() + + "}"; + } +} diff --git a/src/org/hibernate/hql/ast/tree/Node.java b/src/org/hibernate/hql/ast/tree/Node.java new file mode 100644 index 0000000000..ba59e8a2f5 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/Node.java @@ -0,0 +1,74 @@ +package org.hibernate.hql.ast.tree; + +import antlr.collections.AST; +import antlr.Token; +import org.hibernate.util.StringHelper; +import org.hibernate.engine.SessionFactoryImplementor; + +/** + * Base node class for use by Hibernate within its AST trees. + * + * @author Joshua Davis + * @author Steve Ebersole + */ +public class Node extends antlr.CommonAST { + private String filename; + private int line; + private int column; + private int textLength; + + public Node() { + super(); + } + + public Node(Token tok) { + super(tok); // This will call initialize(tok)! + } + + /** + * Retrieve the text to be used for rendering this particular node. + * + * @param sessionFactory The session factory + * @return The text to use for rendering + */ + public String getRenderText(SessionFactoryImplementor sessionFactory) { + // The basic implementation is to simply use the node's text + return getText(); + } + + public void initialize(Token tok) { + super.initialize(tok); + filename = tok.getFilename(); + line = tok.getLine(); + column = tok.getColumn(); + String text = tok.getText(); + textLength = StringHelper.isEmpty(text) ? 0 : text.length(); + } + + public void initialize(AST t) { + super.initialize( t ); + if ( t instanceof Node ) { + Node n = (Node)t; + filename = n.filename; + line = n.line; + column = n.column; + textLength = n.textLength; + } + } + + public String getFilename() { + return filename; + } + + public int getLine() { + return line; + } + + public int getColumn() { + return column; + } + + public int getTextLength() { + return textLength; + } +} diff --git a/src/org/hibernate/hql/ast/tree/OperatorNode.java b/src/org/hibernate/hql/ast/tree/OperatorNode.java new file mode 100644 index 0000000000..b46b979ff1 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/OperatorNode.java @@ -0,0 +1,24 @@ +package org.hibernate.hql.ast.tree; + +import org.hibernate.type.Type; +import antlr.SemanticException; + +/** + * Contract for nodes representing operators (logic or arithmetic). + * + * @author Steve Ebersole + */ +public interface OperatorNode { + /** + * Called by the tree walker during hql-sql semantic analysis + * after the operator sub-tree is completely built. 
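+ * <p/>
+ * A minimal sketch of the expected call sequence (illustrative only,
+ * not part of the original source):
+ * <pre>
+ * OperatorNode op = ...;        // sub-tree for e.g. "a + b" is complete
+ * op.initialize();              // resolve operand / expected types
+ * Type dataType = op.getDataType();
+ * </pre>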
+ */ + public abstract void initialize() throws SemanticException; + + /** + * Retrieves the data type for the overall operator expression. + * + * @return The expression's data type. + */ + public Type getDataType(); +} diff --git a/src/org/hibernate/hql/ast/tree/OrderByClause.java b/src/org/hibernate/hql/ast/tree/OrderByClause.java new file mode 100644 index 0000000000..c0f7acdcda --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/OrderByClause.java @@ -0,0 +1,26 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; + +import antlr.collections.AST; + +/** + * Implementation of OrderByClause. + * + * @author Steve Ebersole + */ +public class OrderByClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes { + + public void addOrderFragment(String orderByFragment) { + AST fragment = ASTUtil.create( getASTFactory(), SQL_TOKEN, orderByFragment ); + if ( getFirstChild() == null ) { + setFirstChild( fragment ); + } + else { + addChild( fragment ); + } + } + +} diff --git a/src/org/hibernate/hql/ast/tree/ParameterNode.java b/src/org/hibernate/hql/ast/tree/ParameterNode.java new file mode 100644 index 0000000000..a0f3b9dd73 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/ParameterNode.java @@ -0,0 +1,52 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.param.ParameterSpecification; +import org.hibernate.type.Type; +import org.hibernate.engine.SessionFactoryImplementor; + +/** + * Implementation of ParameterNode. + * + * @author Steve Ebersole + */ +public class ParameterNode extends HqlSqlWalkerNode implements DisplayableNode, ExpectedTypeAwareNode { + private ParameterSpecification parameterSpecification; + + public ParameterSpecification getHqlParameterSpecification() { + return parameterSpecification; + } + + public void setHqlParameterSpecification(ParameterSpecification parameterSpecification) { + this.parameterSpecification = parameterSpecification; + } + + public String getDisplayText() { + return "{" + ( parameterSpecification == null ? "???" : parameterSpecification.renderDisplayInfo() ) + "}"; + } + + public void setExpectedType(Type expectedType) { + getHqlParameterSpecification().setExpectedType( expectedType ); + setDataType( expectedType ); + } + + public Type getExpectedType() { + return getHqlParameterSpecification() == null ? null : getHqlParameterSpecification().getExpectedType(); + } + + public String getRenderText(SessionFactoryImplementor sessionFactory) { + int count = 0; + if ( getExpectedType() != null && ( count = getExpectedType().getColumnSpan( sessionFactory ) ) > 1 ) { + StringBuffer buffer = new StringBuffer(); + buffer.append( "(?" ); + for ( int i = 1; i < count; i++ ) { + buffer.append( ", ?" ); + } + buffer.append( ")" ); + return buffer.toString(); + } + else { + return "?"; + } + } +} diff --git a/src/org/hibernate/hql/ast/tree/PathNode.java b/src/org/hibernate/hql/ast/tree/PathNode.java new file mode 100644 index 0000000000..017923b471 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/PathNode.java @@ -0,0 +1,16 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +/** + * An AST node with a path property. This path property will be the fully qualified name. + * + * @author josh Nov 7, 2004 10:56:49 AM + */ +public interface PathNode { + /** + * Returns the full path name represented by the node. + * + * @return the full path name represented by the node. 
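+ * (e.g. for the HQL path expression <tt>cat.mate.name</tt>, the value
+ * returned would be "cat.mate.name")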
+ */ + String getPath(); +} diff --git a/src/org/hibernate/hql/ast/tree/QueryNode.java b/src/org/hibernate/hql/ast/tree/QueryNode.java new file mode 100644 index 0000000000..e375b94825 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/QueryNode.java @@ -0,0 +1,133 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.type.Type; + +import antlr.SemanticException; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Defines a top-level AST node representing an HQL select statement. + * + * @author Joshua Davis + */ +public class QueryNode extends AbstractRestrictableStatement implements SelectExpression { + + private static final Log log = LogFactory.getLog( QueryNode.class ); + + private OrderByClause orderByClause; + + /** + * @see Statement#getStatementType() + */ + public int getStatementType() { + return HqlSqlTokenTypes.QUERY; + } + + /** + * @see Statement#needsExecutor() + */ + public boolean needsExecutor() { + return false; + } + + protected int getWhereClauseParentTokenType() { + return SqlTokenTypes.FROM; + } + + protected Log getLog() { + return log; + } + + /** + * Locate the select clause that is part of this select statement. + *
    + * Note, that this might return null as derived select clauses (i.e., no + * select clause at the HQL-level) get generated much later than when we + * get created; thus it depends upon lifecycle. + * + * @return Our select clause, or null. + */ + public final SelectClause getSelectClause() { + // Due to the complexity in initializing the SelectClause, do not generate one here. + // If it is not found; simply return null... + // + // Also, do not cache since it gets generated well after we are created. + return ( SelectClause ) ASTUtil.findTypeInChildren( this, SqlTokenTypes.SELECT_CLAUSE ); + } + + public final boolean hasOrderByClause() { + OrderByClause orderByClause = locateOrderByClause(); + return orderByClause != null && orderByClause.getNumberOfChildren() > 0; + } + + public final OrderByClause getOrderByClause() { + if ( orderByClause == null ) { + orderByClause = locateOrderByClause(); + + // if there is no order by, make one + if ( orderByClause == null ) { + log.debug( "getOrderByClause() : Creating a new ORDER BY clause" ); + orderByClause = ( OrderByClause ) ASTUtil.create( getWalker().getASTFactory(), SqlTokenTypes.ORDER, "ORDER" ); + + // Find the WHERE; if there is no WHERE, find the FROM... + AST prevSibling = ASTUtil.findTypeInChildren( this, SqlTokenTypes.WHERE ); + if ( prevSibling == null ) { + prevSibling = ASTUtil.findTypeInChildren( this, SqlTokenTypes.FROM ); + } + + // Now, inject the newly built ORDER BY into the tree + orderByClause.setNextSibling( prevSibling.getNextSibling() ); + prevSibling.setNextSibling( orderByClause ); + } + } + return orderByClause; + } + + private OrderByClause locateOrderByClause() { + return ( OrderByClause ) ASTUtil.findTypeInChildren( this, SqlTokenTypes.ORDER ); + } + + + private String alias; + + public String getAlias() { + return alias; + } + + public FromElement getFromElement() { + return null; + } + + public boolean isConstructor() { + return false; + } + + public boolean isReturnableEntity() throws SemanticException { + return false; + } + + public boolean isScalar() throws SemanticException { + return true; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public void setScalarColumnText(int i) throws SemanticException { + ColumnHelper.generateSingleScalarColumn( this, i ); + } + + public Type getDataType() { + return ( (SelectExpression) getSelectClause().getFirstSelectExpression() ).getDataType(); + } + +} diff --git a/src/org/hibernate/hql/ast/tree/ResolvableNode.java b/src/org/hibernate/hql/ast/tree/ResolvableNode.java new file mode 100644 index 0000000000..30a94910bd --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/ResolvableNode.java @@ -0,0 +1,38 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import antlr.SemanticException; +import antlr.collections.AST; + +/** + * The contract for expression sub-trees that can resolve themselves. 
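+ * <p/>
+ * A sketch of how the walker typically drives resolution (illustrative
+ * only, not part of the original source):
+ * <pre>
+ * ResolvableNode ref = ...;   // e.g. the dot-node for "cat.mate"
+ * ref.resolve( true, true );  // may generate an implicit join
+ * </pre>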
+ * + * @author josh Sep 25, 2004 11:27:36 AM + */ +public interface ResolvableNode { + /** + * Does the work of resolving an identifier or a dot + */ + void resolve(boolean generateJoin, boolean implicitJoin, String classAlias, AST parent) throws SemanticException; + + /** + * Does the work of resolving an identifier or a dot, but without a parent node + */ + void resolve(boolean generateJoin, boolean implicitJoin, String classAlias) throws SemanticException; + + /** + * Does the work of resolving an identifier or a dot, but without a parent node or alias + */ + void resolve(boolean generateJoin, boolean implicitJoin) throws SemanticException; + + /** + * Does the work of resolving inside of the scope of a function call + */ + void resolveInFunctionCall(boolean generateJoin, boolean implicitJoin) throws SemanticException; + + /** + * Does the work of resolving an an index []. + */ + void resolveIndex(AST parent) throws SemanticException; + +} diff --git a/src/org/hibernate/hql/ast/tree/RestrictableStatement.java b/src/org/hibernate/hql/ast/tree/RestrictableStatement.java new file mode 100644 index 0000000000..bd720007fb --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/RestrictableStatement.java @@ -0,0 +1,41 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import antlr.collections.AST; + +/** + * Type definition for Statements which are restrictable via a where-clause (and + * thus also having a from-clause). + * + * @author Steve Ebersole + */ +public interface RestrictableStatement extends Statement { + /** + * Retreives the from-clause in effect for this statement. + * + * @return The from-clause for this statement; could be null if the from-clause + * has not yet been parsed/generated. + */ + public FromClause getFromClause(); + + /** + * Does this statement tree currently contain a where clause? + * + * @return True if a where-clause is found in the statement tree and + * that where clause actually defines restrictions; false otherwise. + */ + public boolean hasWhereClause(); + + /** + * Retreives the where-clause defining the restriction(s) in effect for + * this statement. + *
    + * Note that this will generate a where-clause if one was not found, so caution + * needs to taken prior to calling this that restrictions will actually exist + * in the resulting statement tree (otherwise "unexpected end of subtree" errors + * might occur during rendering). + * + * @return The where clause. + */ + public AST getWhereClause(); +} diff --git a/src/org/hibernate/hql/ast/tree/SelectClause.java b/src/org/hibernate/hql/ast/tree/SelectClause.java new file mode 100644 index 0000000000..2011af0248 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/SelectClause.java @@ -0,0 +1,443 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import java.lang.reflect.Constructor; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.util.ASTAppender; +import org.hibernate.hql.ast.util.ASTIterator; +import org.hibernate.hql.ast.util.ASTPrinter; +import org.hibernate.hql.ast.QuerySyntaxException; +import org.hibernate.type.Type; +import org.hibernate.QueryException; + +import antlr.SemanticException; +import antlr.collections.AST; + +/** + * Represents the list of expressions in a SELECT clause. + * + * @author josh Sep 21, 2004 7:53:55 AM + */ +public class SelectClause extends SelectExpressionList { + + private boolean prepared = false; + private boolean scalarSelect; + + private List fromElementsForLoad = new ArrayList(); + //private Type[] sqlResultTypes; + private Type[] queryReturnTypes; + private String[][] columnNames; + private ConstructorNode constructorNode; + private List collectionFromElements; + private String[] aliases; + + /** + * Does this SelectClause represent a scalar query + * + * @return True if this is a scalara select clause; false otherwise. + */ + public boolean isScalarSelect() { + return scalarSelect; + } + + public boolean isDistinct() { + return getFirstChild() != null && getFirstChild().getType() == SqlTokenTypes.DISTINCT; + } + + /** + * FromElements which need to be accounted for in the load phase (either for return or for fetch). + * + * @return List of appropriate FromElements. + */ + public List getFromElementsForLoad() { + return fromElementsForLoad; + } + + /* + * The types represented in the SQL result set. + * + * @return The types represented in the SQL result set. + */ + /*public Type[] getSqlResultTypes() { + return sqlResultTypes; + }*/ + + /** + * The types actually being returned from this query at the "object level". + * + * @return The query return types. + */ + public Type[] getQueryReturnTypes() { + return queryReturnTypes; + } + + /** + * The HQL aliases, or generated aliases + */ + public String[] getQueryReturnAliases() { + return aliases; + } + + /** + * The column alias names being used in the generated SQL. + * + * @return The SQL column aliases. + */ + public String[][] getColumnNames() { + return columnNames; + } + + /** + * The constructor to use for dynamic instantiation queries. + * + * @return The appropriate Constructor reference, or null if not a + * dynamic instantiation query. + */ + public Constructor getConstructor() { + return constructorNode == null ? null : constructorNode.getConstructor(); + } + + public boolean isMap() { + return constructorNode == null ? false : constructorNode.isMap(); + } + + public boolean isList() { + return constructorNode == null ? false : constructorNode.isList(); + } + + /** + * Prepares an explicitly defined select clause. 
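+ * <p/>
+ * For example (illustrative), this prepares the select list of a query
+ * such as "select c.name, c.mate from Cat c".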
+ * + * @param fromClause The from clause linked to this select clause. + * @throws SemanticException + */ + public void initializeExplicitSelectClause(FromClause fromClause) throws SemanticException { + if ( prepared ) { + throw new IllegalStateException( "SelectClause was already prepared!" ); + } + + //explicit = true; // This is an explict Select. + //ArrayList sqlResultTypeList = new ArrayList(); + ArrayList queryReturnTypeList = new ArrayList(); + + // First, collect all of the select expressions. + // NOTE: This must be done *before* invoking setScalarColumnText() because setScalarColumnText() + // changes the AST!!! + SelectExpression[] selectExpressions = collectSelectExpressions(); + + for ( int i = 0; i < selectExpressions.length; i++ ) { + SelectExpression expr = selectExpressions[i]; + + if ( expr.isConstructor() ) { + constructorNode = ( ConstructorNode ) expr; + List constructorArgumentTypeList = constructorNode.getConstructorArgumentTypeList(); + //sqlResultTypeList.addAll( constructorArgumentTypeList ); + queryReturnTypeList.addAll( constructorArgumentTypeList ); + scalarSelect = true; + } + else { + Type type = expr.getDataType(); + if ( type == null ) { + throw new IllegalStateException( "No data type for node: " + expr.getClass().getName() + " " + + new ASTPrinter( SqlTokenTypes.class ).showAsString( ( AST ) expr, "" ) ); + } + //sqlResultTypeList.add( type ); + + // If the data type is not an association type, it could not have been in the FROM clause. + if ( expr.isScalar() ) { + scalarSelect = true; + } + + if ( isReturnableEntity( expr ) ) { + fromElementsForLoad.add( expr.getFromElement() ); + } + + // Always add the type to the return type list. + queryReturnTypeList.add( type ); + } + } + + //init the aliases, after initing the constructornode + initAliases(selectExpressions); + + if ( !getWalker().isShallowQuery() ) { + // add the fetched entities + List fromElements = fromClause.getProjectionList(); + + ASTAppender appender = new ASTAppender( getASTFactory(), this ); // Get ready to start adding nodes. + int size = fromElements.size(); + + Iterator iterator = fromElements.iterator(); + for ( int k = 0; iterator.hasNext(); k++ ) { + FromElement fromElement = ( FromElement ) iterator.next(); + + if ( fromElement.isFetch() ) { + FromElement origin = null; + if ( fromElement.getRealOrigin() == null ) { + // work around that crazy issue where the tree contains + // "empty" FromElements (no text); afaict, this is caused + // by FromElementFactory.createCollectionJoin() + if ( fromElement.getOrigin() == null ) { + throw new QueryException( "Unable to determine origin of join fetch [" + fromElement.getDisplayText() + "]" ); + } + else { + origin = fromElement.getOrigin(); + } + } + else { + origin = fromElement.getRealOrigin(); + } + if ( !fromElementsForLoad.contains( origin ) ) { + throw new QueryException( + "query specified join fetching, but the owner " + + "of the fetched association was not present in the select list " + + "[" + fromElement.getDisplayText() + "]" + ); + } + Type type = fromElement.getSelectType(); + addCollectionFromElement( fromElement ); + if ( type != null ) { + boolean collectionOfElements = fromElement.isCollectionOfValuesOrComponents(); + if ( !collectionOfElements ) { + // Add the type to the list of returned sqlResultTypes. + fromElement.setIncludeSubclasses( true ); + fromElementsForLoad.add( fromElement ); + //sqlResultTypeList.add( type ); + // Generate the select expression. 
+ String text = fromElement.renderIdentifierSelect( size, k ); + SelectExpressionImpl generatedExpr = ( SelectExpressionImpl ) appender.append( SqlTokenTypes.SELECT_EXPR, text, false ); + if ( generatedExpr != null ) { + generatedExpr.setFromElement( fromElement ); + } + } + } + } + } + + // generate id select fragment and then property select fragment for + // each expression, just like generateSelectFragments(). + renderNonScalarSelects( collectSelectExpressions(), fromClause ); + } + + if ( scalarSelect || getWalker().isShallowQuery() ) { + // If there are any scalars (non-entities) selected, render the select column aliases. + renderScalarSelects( selectExpressions, fromClause ); + } + + finishInitialization( /*sqlResultTypeList,*/ queryReturnTypeList ); + } + + private void finishInitialization(/*ArrayList sqlResultTypeList,*/ ArrayList queryReturnTypeList) { + //sqlResultTypes = ( Type[] ) sqlResultTypeList.toArray( new Type[sqlResultTypeList.size()] ); + queryReturnTypes = ( Type[] ) queryReturnTypeList.toArray( new Type[queryReturnTypeList.size()] ); + initializeColumnNames(); + prepared = true; + } + + private void initializeColumnNames() { + // Generate an 2d array of column names, the first dimension is parallel with the + // return types array. The second dimension is the list of column names for each + // type. + + // todo: we should really just collect these from the various SelectExpressions, rather than regenerating here + columnNames = getSessionFactoryHelper().generateColumnNames( queryReturnTypes ); + } + + /** + * Prepares a derived (i.e., not explicitly defined in the query) select clause. + * + * @param fromClause The from clause to which this select clause is linked. + */ + public void initializeDerivedSelectClause(FromClause fromClause) throws SemanticException { + if ( prepared ) { + throw new IllegalStateException( "SelectClause was already prepared!" ); + } + + //Used to be tested by the TCK but the test is no longer here +// if ( getSessionFactoryHelper().isStrictJPAQLComplianceEnabled() && !getWalker().isSubQuery() ) { +// // NOTE : the isSubQuery() bit is a temporary hack... +// throw new QuerySyntaxException( "JPA-QL compliance requires select clause" ); +// } + List fromElements = fromClause.getProjectionList(); + + ASTAppender appender = new ASTAppender( getASTFactory(), this ); // Get ready to start adding nodes. + int size = fromElements.size(); + ArrayList sqlResultTypeList = new ArrayList( size ); + ArrayList queryReturnTypeList = new ArrayList( size ); + + Iterator iterator = fromElements.iterator(); + for ( int k = 0; iterator.hasNext(); k++ ) { + FromElement fromElement = ( FromElement ) iterator.next(); + Type type = fromElement.getSelectType(); + + addCollectionFromElement( fromElement ); + + if ( type != null ) { + boolean collectionOfElements = fromElement.isCollectionOfValuesOrComponents(); + if ( !collectionOfElements ) { + if ( !fromElement.isFetch() ) { + // Add the type to the list of returned sqlResultTypes. + queryReturnTypeList.add( type ); + } + fromElementsForLoad.add( fromElement ); + sqlResultTypeList.add( type ); + // Generate the select expression. + String text = fromElement.renderIdentifierSelect( size, k ); + SelectExpressionImpl generatedExpr = ( SelectExpressionImpl ) appender.append( SqlTokenTypes.SELECT_EXPR, text, false ); + if ( generatedExpr != null ) { + generatedExpr.setFromElement( fromElement ); + } + } + } + } + + // Get all the select expressions (that we just generated) and render the select. 
+ SelectExpression[] selectExpressions = collectSelectExpressions(); + + if ( getWalker().isShallowQuery() ) { + renderScalarSelects( selectExpressions, fromClause ); + } + else { + renderNonScalarSelects( selectExpressions, fromClause ); + } + finishInitialization( /*sqlResultTypeList,*/ queryReturnTypeList ); + } + + public static boolean VERSION2_SQL = false; + + private void addCollectionFromElement(FromElement fromElement) { + if ( fromElement.isFetch() ) { + if ( fromElement.isCollectionJoin() || fromElement.getQueryableCollection() != null ) { + String suffix; + if (collectionFromElements==null) { + collectionFromElements = new ArrayList(); + suffix = VERSION2_SQL ? "__" : "0__"; + } + else { + suffix = Integer.toString( collectionFromElements.size() ) + "__"; + } + collectionFromElements.add( fromElement ); + fromElement.setCollectionSuffix( suffix ); + } + } + } + + protected AST getFirstSelectExpression() { + AST n = getFirstChild(); + // Skip 'DISTINCT' and 'ALL', so we return the first expression node. + while ( n != null && ( n.getType() == SqlTokenTypes.DISTINCT || n.getType() == SqlTokenTypes.ALL ) ) { + n = n.getNextSibling(); + } + return n; + } + + private boolean isReturnableEntity(SelectExpression selectExpression) throws SemanticException { + FromElement fromElement = selectExpression.getFromElement(); + boolean isFetchOrValueCollection = fromElement != null && + ( fromElement.isFetch() || fromElement.isCollectionOfValuesOrComponents() ); + if ( isFetchOrValueCollection ) { + return false; + } + else { + return selectExpression.isReturnableEntity(); + } + } + + private void renderScalarSelects(SelectExpression[] se, FromClause currentFromClause) throws SemanticException { + if ( !currentFromClause.isSubQuery() ) { + for ( int i = 0; i < se.length; i++ ) { + SelectExpression expr = se[i]; + expr.setScalarColumnText( i ); // Create SQL_TOKEN nodes for the columns. + } + } + } + + private void initAliases(SelectExpression[] selectExpressions) { + if (constructorNode==null) { + aliases = new String[selectExpressions.length]; + for ( int i=0; i 0 && originalText == null ) { + originalText = s; + } + } + + public String getOriginalText() { + return originalText; + } + + public Type getDataType() { + return dataType; + } + + public void setDataType(Type dataType) { + this.dataType = dataType; + } + +} diff --git a/src/org/hibernate/hql/ast/tree/Statement.java b/src/org/hibernate/hql/ast/tree/Statement.java new file mode 100644 index 0000000000..ff89c921cb --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/Statement.java @@ -0,0 +1,36 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.ast.HqlSqlWalker; + +/** + * Common interface modeling the different HQL statements (i.e., INSERT, UPDATE, DELETE, SELECT). + * + * @author Steve Ebersole + */ +public interface Statement { + + /** + * Retreive the "phase 2" walker which generated this statement tree. + * + * @return The HqlSqlWalker instance which generated this statement tree. + */ + public HqlSqlWalker getWalker(); + + /** + * Return the main token type representing the type of this statement. + * + * @return The corresponding token type. + */ + public int getStatementType(); + + /** + * Does this statement require the StatementExecutor? + *
    + * Essentially, at the JDBC level, does this require an executeUpdate()? + * + * @return True if this statement should be handed off to the + * StatementExecutor to be executed; false otherwise. + */ + public boolean needsExecutor(); +} diff --git a/src/org/hibernate/hql/ast/tree/UnaryArithmeticNode.java b/src/org/hibernate/hql/ast/tree/UnaryArithmeticNode.java new file mode 100644 index 0000000000..96338e4234 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/UnaryArithmeticNode.java @@ -0,0 +1,27 @@ +//$Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.ast.util.ColumnHelper; +import org.hibernate.type.Type; + +import antlr.SemanticException; + +public class UnaryArithmeticNode extends AbstractSelectExpression implements UnaryOperatorNode { + + public Type getDataType() { + return ( ( SqlNode ) getOperand() ).getDataType(); + } + + public void setScalarColumnText(int i) throws SemanticException { + ColumnHelper.generateSingleScalarColumn( this, i ); + } + + public void initialize() { + // nothing to do; even if the operand is a parameter, no way we could + // infer an appropriate expected type here + } + + public Node getOperand() { + return ( Node ) getFirstChild(); + } +} diff --git a/src/org/hibernate/hql/ast/tree/UnaryLogicOperatorNode.java b/src/org/hibernate/hql/ast/tree/UnaryLogicOperatorNode.java new file mode 100644 index 0000000000..0064e9adf7 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/UnaryLogicOperatorNode.java @@ -0,0 +1,23 @@ +package org.hibernate.hql.ast.tree; + +import org.hibernate.type.Type; +import org.hibernate.Hibernate; + +/** + * @author Steve Ebersole + */ +public class UnaryLogicOperatorNode extends SqlNode implements UnaryOperatorNode { + public Node getOperand() { + return ( Node ) getFirstChild(); + } + + public void initialize() { + // nothing to do; even if the operand is a parameter, no way we could + // infer an appropriate expected type here + } + + public Type getDataType() { + // logic operators by definition resolve to booleans + return Hibernate.BOOLEAN; + } +} diff --git a/src/org/hibernate/hql/ast/tree/UnaryOperatorNode.java b/src/org/hibernate/hql/ast/tree/UnaryOperatorNode.java new file mode 100644 index 0000000000..74b9a9abdb --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/UnaryOperatorNode.java @@ -0,0 +1,15 @@ +package org.hibernate.hql.ast.tree; + +/** + * Contract for nodes representing unary operators. + * + * @author Steve Ebersole + */ +public interface UnaryOperatorNode extends OperatorNode { + /** + * Retrievs the node representing the operator's single operand. + * + * @return The operator's single operand. + */ + public Node getOperand(); +} diff --git a/src/org/hibernate/hql/ast/tree/UpdateStatement.java b/src/org/hibernate/hql/ast/tree/UpdateStatement.java new file mode 100644 index 0000000000..3b08118ad4 --- /dev/null +++ b/src/org/hibernate/hql/ast/tree/UpdateStatement.java @@ -0,0 +1,47 @@ +// $Id$ +package org.hibernate.hql.ast.tree; + +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; + +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Defines a top-level AST node representing an HQL update statement. 
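+ * <p/>
+ * For example (illustrative), this is the statement node produced for
+ * "update Cat c set c.name = :newName where c.id = :id".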
+ * + * @author Steve Ebersole + */ +public class UpdateStatement extends AbstractRestrictableStatement { + + private static final Log log = LogFactory.getLog( UpdateStatement.class ); + + /** + * @see org.hibernate.hql.ast.tree.Statement#getStatementType() + */ + public int getStatementType() { + return SqlTokenTypes.UPDATE; + } + + /** + * @see org.hibernate.hql.ast.tree.Statement#needsExecutor() + */ + public boolean needsExecutor() { + return true; + } + + protected int getWhereClauseParentTokenType() { + return SqlTokenTypes.SET; + } + + protected Log getLog() { + return log; + } + + public AST getSetClause() { + return ASTUtil.findTypeInChildren( this, HqlSqlTokenTypes.SET ); + } +} diff --git a/src/org/hibernate/hql/ast/util/ASTAppender.java b/src/org/hibernate/hql/ast/util/ASTAppender.java new file mode 100644 index 0000000000..0e964f0d04 --- /dev/null +++ b/src/org/hibernate/hql/ast/util/ASTAppender.java @@ -0,0 +1,46 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import antlr.ASTFactory; +import antlr.collections.AST; + +/** + * Appends child nodes to a parent efficiently. + * + * @author josh Jul 24, 2004 8:28:23 AM + */ +public class ASTAppender { + private AST parent; + private AST last; + private ASTFactory factory; + + public ASTAppender(ASTFactory factory, AST parent) { + this( parent ); + this.factory = factory; + } + + public ASTAppender(AST parent) { + this.parent = parent; + this.last = ASTUtil.getLastChild( parent ); + } + + public AST append(int type, String text, boolean appendIfEmpty) { + if ( text != null && ( appendIfEmpty || text.length() > 0 ) ) { + return append( factory.create( type, text ) ); + } + else { + return null; + } + } + + public AST append(AST child) { + if ( last == null ) { + parent.setFirstChild( child ); + } + else { + last.setNextSibling( child ); + } + last = child; + return last; + } +} diff --git a/src/org/hibernate/hql/ast/util/ASTIterator.java b/src/org/hibernate/hql/ast/util/ASTIterator.java new file mode 100644 index 0000000000..3f3ca72b3e --- /dev/null +++ b/src/org/hibernate/hql/ast/util/ASTIterator.java @@ -0,0 +1,70 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import java.util.Iterator; +import java.util.LinkedList; + +import antlr.collections.AST; + +/** + * Depth first iteration of an ANTLR AST. 
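+ * <p/>
+ * A minimal usage sketch (illustrative only, not part of the original
+ * source):
+ * <pre>
+ * ASTIterator iter = new ASTIterator( tree );
+ * while ( iter.hasNext() ) {
+ *     AST node = ( AST ) iter.next();
+ *     // inspect node...
+ * }
+ * </pre>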
+ * + * @author josh Sep 25, 2004 7:44:39 AM + */ +public class ASTIterator implements Iterator { + private AST next, current; + private LinkedList parents = new LinkedList(); + + public void remove() { + throw new UnsupportedOperationException( "remove() is not supported" ); + } + + public boolean hasNext() { + return next != null; + } + + public Object next() { + return nextNode(); + } + + public ASTIterator(AST tree) { + next = tree; + down(); + } + + public AST nextNode() { + current = next; + if ( next != null ) { + AST nextSibling = next.getNextSibling(); + if ( nextSibling == null ) { + next = pop(); + } + else { + next = nextSibling; + down(); + } + } + return current; + } + + private void down() { + while ( next != null && next.getFirstChild() != null ) { + push( next ); + next = next.getFirstChild(); + } + } + + private void push(AST parent) { + parents.addFirst( parent ); + } + + private AST pop() { + if ( parents.size() == 0 ) { + return null; + } + else { + return ( AST ) parents.removeFirst(); + } + } + +} diff --git a/src/org/hibernate/hql/ast/util/ASTParentsFirstIterator.java b/src/org/hibernate/hql/ast/util/ASTParentsFirstIterator.java new file mode 100644 index 0000000000..3f8ddcd48a --- /dev/null +++ b/src/org/hibernate/hql/ast/util/ASTParentsFirstIterator.java @@ -0,0 +1,73 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import java.util.Iterator; +import java.util.LinkedList; + +import antlr.collections.AST; + +/** + * Depth first iteration of an ANTLR AST. + * + * @author josh Sep 25, 2004 7:44:39 AM + */ +public class ASTParentsFirstIterator implements Iterator { + private AST next, current, tree; + private LinkedList parents = new LinkedList(); + + public void remove() { + throw new UnsupportedOperationException( "remove() is not supported" ); + } + + public boolean hasNext() { + return next != null; + } + + public Object next() { + return nextNode(); + } + + public ASTParentsFirstIterator(AST tree) { + this.tree = next = tree; + } + + public AST nextNode() { + current = next; + if ( next != null ) { + AST child = next.getFirstChild(); + if ( child == null ) { + AST sibling = next.getNextSibling(); + if ( sibling == null ) { + AST parent = pop(); + while ( parent != null && parent.getNextSibling() == null ) + parent = pop(); + next = ( parent != null ) ? parent.getNextSibling() : null; + } + else { + next = sibling; + } + } + else { + if ( next != tree ) { + push( next ); + } + next = child; + } + } + return current; + } + + private void push(AST parent) { + parents.addFirst( parent ); + } + + private AST pop() { + if ( parents.size() == 0 ) { + return null; + } + else { + return ( AST ) parents.removeFirst(); + } + } + +} diff --git a/src/org/hibernate/hql/ast/util/ASTPrinter.java b/src/org/hibernate/hql/ast/util/ASTPrinter.java new file mode 100644 index 0000000000..1f7a1cdf4a --- /dev/null +++ b/src/org/hibernate/hql/ast/util/ASTPrinter.java @@ -0,0 +1,262 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.io.PrintWriter; +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +import org.hibernate.hql.ast.tree.DisplayableNode; +import org.hibernate.util.StringHelper; + +import antlr.collections.AST; + +/** + * An 'ASCII art' AST printer for debugging ANTLR grammars. 
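+ * <p/>
+ * Typical usage (illustrative only; the header text is arbitrary):
+ * <pre>
+ * ASTPrinter printer = new ASTPrinter( HqlSqlTokenTypes.class );
+ * String tree = printer.showAsString( ast, "--- HQL AST ---" );
+ * </pre>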
+ * + * @author Joshua Davis (pgmjsd@sourceforge.net) + */ +public class ASTPrinter { + private Map tokenTypeNamesByTokenType; + private Class tokenTypeConstants; + private boolean showClassNames = true; + + /** + * Constructs an org.hibernate.hql.antlr.ASTPrinter, given the class that contains the token type + * constants (typically the '{grammar}TokenTypes' interface generated by + * ANTLR). + * + * @param tokenTypeConstants The class with token type constants in it. + */ + public ASTPrinter(Class tokenTypeConstants) { + this.tokenTypeConstants = tokenTypeConstants; + } + + /** + * Returns true if the node class names will be displayed. + * + * @return true if the node class names will be displayed. + */ + public boolean isShowClassNames() { + return showClassNames; + } + + /** + * Enables or disables AST node class name display. + * + * @param showClassNames true to enable class name display, false to disable + */ + public void setShowClassNames(boolean showClassNames) { + this.showClassNames = showClassNames; + } + + /** + * Prints the AST in 'ASCII art' tree form to the specified print stream. + * + * @param ast The AST to print. + * @param out The print stream. + */ + private void showAst(AST ast, PrintStream out) { + showAst( ast, new PrintWriter( out ) ); + } + + /** + * Prints the AST in 'ASCII art' tree form to the specified print writer. + * + * @param ast The AST to print. + * @param pw The print writer. + */ + public void showAst(AST ast, PrintWriter pw) { + ArrayList parents = new ArrayList(); + showAst( parents, pw, ast ); + pw.flush(); + } + + /** + * Prints the AST in 'ASCII art' tree form into a string. + * + * @param ast The AST to display. + * @param header The header for the display. + * @return The AST in 'ASCII art' form, as a string. + */ + public String showAsString(AST ast, String header) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + PrintStream ps = new PrintStream( baos ); + ps.println( header ); + showAst( ast, ps ); + ps.flush(); + return new String( baos.toByteArray() ); + } + + /** + * Get a single token type name in the specified set of token type constants (interface). + * + * @param tokenTypeConstants Token type constants interface (e.g. HqlSqlTokenTypes.class). + * @param type The token type ( typically from ast.getType() ). + * @return The token type name, *or* the integer value if the name could not be found for some reason. + */ + public static String getConstantName(Class tokenTypeConstants, int type) { + String tokenTypeName = null; + if ( tokenTypeConstants != null ) { + Field[] fields = tokenTypeConstants.getFields(); + for ( int i = 0; i < fields.length; i++ ) { + Field field = fields[i]; + tokenTypeName = getTokenTypeName( field, type, true ); + if ( tokenTypeName != null ) { + break; // Stop if found. 
+ } + } // for + } // if type constants were provided + + // Use the integer value if no token type name was found + if ( tokenTypeName == null ) { + tokenTypeName = Integer.toString( type ); + } + + return tokenTypeName; + } + + private static String getTokenTypeName(Field field, int type, boolean checkType) { + if ( Modifier.isStatic( field.getModifiers() ) ) { + try { + Object value = field.get( null ); + if ( !checkType ) { + return field.getName(); + } + else if ( value instanceof Integer ) { + Integer integer = ( Integer ) value; + if ( integer.intValue() == type ) { + return field.getName(); + } + } // if value is an integer + } // try + catch ( IllegalArgumentException ignore ) { + } + catch ( IllegalAccessException ignore ) { + } + } // if the field is static + return null; + } + + /** + * Returns the token type name for the given token type. + * + * @param type The token type. + * @return String - The token type name from the token type constant class, + * or just the integer as a string if none exists. + */ + private String getTokenTypeName(int type) { + // If the class with the constants in it was not supplied, just + // use the integer token type as the token type name. + if ( tokenTypeConstants == null ) { + return Integer.toString( type ); + } + + // Otherwise, create a type id -> name map from the class if it + // hasn't already been created. + if ( tokenTypeNamesByTokenType == null ) { + Field[] fields = tokenTypeConstants.getFields(); + tokenTypeNamesByTokenType = new HashMap(); + String tokenTypeName = null; + for ( int i = 0; i < fields.length; i++ ) { + Field field = fields[i]; + tokenTypeName = getTokenTypeName( field, type, false ); + if ( tokenTypeName != null ) { + try { + tokenTypeNamesByTokenType.put( field.get( null ), field.getName() ); + } + catch ( IllegalAccessException ignore ) { + } + } + } // for + } // if the map hasn't been created. + + return ( String ) tokenTypeNamesByTokenType.get( new Integer( type ) ); + } + + private void showAst(ArrayList parents, PrintWriter pw, AST ast) { + if ( ast == null ) { + pw.println( "AST is null!" ); + return; + } + + for ( int i = 0; i < parents.size(); i++ ) { + AST parent = ( AST ) parents.get( i ); + if ( parent.getNextSibling() == null ) { + + pw.print( " " ); + } + else { + pw.print( " | " ); + } + } + + if ( ast.getNextSibling() == null ) { + pw.print( " \\-" ); + } + else { + pw.print( " +-" ); + } + + showNode( pw, ast ); + + ArrayList newParents = new ArrayList( parents ); + newParents.add( ast ); + for ( AST child = ast.getFirstChild(); child != null; child = child.getNextSibling() ) { + showAst( newParents, pw, child ); + } + newParents.clear(); + } + + private void showNode(PrintWriter pw, AST ast) { + String s = nodeToString( ast, isShowClassNames() ); + pw.println( s ); + } + + public String nodeToString(AST ast, boolean showClassName) { + if ( ast == null ) { + return "{null}"; + } + StringBuffer buf = new StringBuffer(); + buf.append( "[" ).append( getTokenTypeName( ast.getType() ) ).append( "] " ); + if ( showClassName ) { + buf.append( StringHelper.unqualify( ast.getClass().getName() ) ).append( ": " ); + } + + buf.append( "'" ); + String text = ast.getText(); + appendEscapedMultibyteChars(text, buf); + buf.append( "'" ); + if ( ast instanceof DisplayableNode ) { + DisplayableNode displayableNode = ( DisplayableNode ) ast; + // Add a space before the display text. 
+ buf.append( " " ).append( displayableNode.getDisplayText() ); + } + String s = buf.toString(); + return s; + } + + public static void appendEscapedMultibyteChars(String text, StringBuffer buf) { + char[] chars = text.toCharArray(); + for (int i = 0; i < chars.length; i++) { + char aChar = chars[i]; + if (aChar > 256) { + buf.append("\\u"); + buf.append(Integer.toHexString(aChar)); + } + else + buf.append(aChar); + } + } + + public static String escapeMultibyteChars(String text) + { + StringBuffer buf = new StringBuffer(); + appendEscapedMultibyteChars(text,buf); + return buf.toString(); + } +} diff --git a/src/org/hibernate/hql/ast/util/ASTUtil.java b/src/org/hibernate/hql/ast/util/ASTUtil.java new file mode 100644 index 0000000000..8268c944cd --- /dev/null +++ b/src/org/hibernate/hql/ast/util/ASTUtil.java @@ -0,0 +1,360 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import java.util.ArrayList; +import java.util.List; + +import antlr.ASTFactory; +import antlr.collections.AST; +import antlr.collections.impl.ASTArray; + +/** + * Provides utility methods for AST traversal and manipulation. + * + * @author Joshua Davis (pgmjsd@sourceforge.net) + */ +public final class ASTUtil { + /** + * Private empty constructor. + * (or else checkstyle says: 'warning: Utility classes should not have a public or default constructor.') + * + * @deprecated (tell clover to ignore this) + */ + private ASTUtil() { + } + + /** + * Creates a single node AST. + * + * @param astFactory The factory. + * @param type The node type. + * @param text The node text. + * @return AST - A single node tree. + */ + public static AST create(ASTFactory astFactory, int type, String text) { + AST node = astFactory.create( type, text ); + return node; + } + + /** + * Creates a single node AST as a sibling. + * + * @param astFactory The factory. + * @param type The node type. + * @param text The node text. + * @param prevSibling The previous sibling. + * @return AST - A single node tree. + */ + public static AST createSibling(ASTFactory astFactory, int type, String text, AST prevSibling) { + AST node = astFactory.create( type, text ); + node.setNextSibling( prevSibling.getNextSibling() ); + prevSibling.setNextSibling( node ); + return node; + } + + public static AST insertSibling(AST node, AST prevSibling) { + node.setNextSibling( prevSibling.getNextSibling() ); + prevSibling.setNextSibling( node ); + return node; + } + + /** + * Creates a 'binary operator' subtree, given the information about the + * parent and the two child nodex. + * + * @param factory The AST factory. + * @param parentType The type of the parent node. + * @param parentText The text of the parent node. + * @param child1 The first child. + * @param child2 The second child. + * @return AST - A new sub-tree of the form "(parent child1 child2)" + */ + public static AST createBinarySubtree(ASTFactory factory, int parentType, String parentText, AST child1, AST child2) { + ASTArray array = createAstArray( factory, 3, parentType, parentText, child1 ); + array.add( child2 ); + return factory.make( array ); + } + + /** + * Creates a single parent of the specified child (i.e. a 'unary operator' + * subtree). + * + * @param factory The AST factory. + * @param parentType The type of the parent node. + * @param parentText The text of the parent node. + * @param child The child. 
+ * @return AST - A new sub-tree of the form "(parent child)" + */ + public static AST createParent(ASTFactory factory, int parentType, String parentText, AST child) { + ASTArray array = createAstArray( factory, 2, parentType, parentText, child ); + return factory.make( array ); + } + + public static AST createTree(ASTFactory factory, AST[] nestedChildren) { + AST[] array = new AST[2]; + int limit = nestedChildren.length - 1; + for ( int i = limit; i >= 0; i-- ) { + if ( i != limit ) { + array[1] = nestedChildren[i + 1]; + array[0] = nestedChildren[i]; + factory.make( array ); + } + } + return array[0]; + } + + /** + * Finds the first node of the specified type in the chain of children. + * + * @param parent The parent + * @param type The type to find. + * @return The first node of the specified type, or null if not found. + */ + public static AST findTypeInChildren(AST parent, int type) { + AST n = parent.getFirstChild(); + while ( n != null && n.getType() != type ) { + n = n.getNextSibling(); + } + return n; + } + + /** + * Returns the last direct child of 'n'. + * + * @param n The parent + * @return The last direct child of 'n'. + */ + public static AST getLastChild(AST n) { + return getLastSibling( n.getFirstChild() ); + } + + /** + * Returns the last sibling of 'a'. + * + * @param a The sibling. + * @return The last sibling of 'a'. + */ + private static AST getLastSibling(AST a) { + AST last = null; + while ( a != null ) { + last = a; + a = a.getNextSibling(); + } + return last; + } + + /** + * Returns the 'list' representation with some brackets around it for debugging. + * + * @param n The tree. + * @return The list representation of the tree. + */ + public static String getDebugString(AST n) { + StringBuffer buf = new StringBuffer(); + buf.append( "[ " ); + buf.append( ( n == null ) ? "{null}" : n.toStringTree() ); + buf.append( " ]" ); + return buf.toString(); + } + + /** + * Find the previous sibling in the parent for the given child. + * + * @param parent the parent node + * @param child the child to find the previous sibling of + * @return the previous sibling of the child + */ + public static AST findPreviousSibling(AST parent, AST child) { + AST prev = null; + AST n = parent.getFirstChild(); + while ( n != null ) { + if ( n == child ) { + return prev; + } + prev = n; + n = n.getNextSibling(); + } + throw new IllegalArgumentException( "Child not found in parent!" ); + } + + /** + * Determine if a given node (test) is a direct (throtle to one level down) + * child of another given node (fixture). + * + * @param fixture The node against which to testto be checked for children. + * @param test The node to be tested as being a child of the parent. + * @return True if test is contained in the fixtures's direct children; + * false otherwise. + */ + public static boolean isDirectChild(AST fixture, AST test) { + AST n = fixture.getFirstChild(); + while ( n != null ) { + if ( n == test ) { + return true; + } + n = n.getNextSibling(); + } + return false; + } + + /** + * Determine if a given node (test) is contained anywhere in the subtree + * of another given node (fixture). + * + * @param fixture The node against which to testto be checked for children. + * @param test The node to be tested as being a subtree child of the parent. + * @return True if child is contained in the parent's collection of children. 
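+ * (the check recurses through the entire subtree, unlike isDirectChild(),
+ * which looks one level down only)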
+ */ + public static boolean isSubtreeChild(AST fixture, AST test) { + AST n = fixture.getFirstChild(); + while ( n != null ) { + if ( n == test ) { + return true; + } + if ( n.getFirstChild() != null && isSubtreeChild( n, test ) ) { + return true; + } + n = n.getNextSibling(); + } + return false; + } + + /** + * Makes the child node a sibling of the parent, reconnecting all siblings. + * + * @param parent the parent + * @param child the child + */ + public static void makeSiblingOfParent(AST parent, AST child) { + AST prev = findPreviousSibling( parent, child ); + if ( prev != null ) { + prev.setNextSibling( child.getNextSibling() ); + } + else { // child == parent.getFirstChild() + parent.setFirstChild( child.getNextSibling() ); + } + child.setNextSibling( parent.getNextSibling() ); + parent.setNextSibling( child ); + } + + public static String getPathText(AST n) { + StringBuffer buf = new StringBuffer(); + getPathText( buf, n ); + return buf.toString(); + } + + private static void getPathText(StringBuffer buf, AST n) { + AST firstChild = n.getFirstChild(); + // If the node has a first child, recurse into the first child. + if ( firstChild != null ) { + getPathText( buf, firstChild ); + } + // Append the text of the current node. + buf.append( n.getText() ); + // If there is a second child (RHS), recurse into that child. + if ( firstChild != null && firstChild.getNextSibling() != null ) { + getPathText( buf, firstChild.getNextSibling() ); + } + } + + public static boolean hasExactlyOneChild(AST n) { + return n != null && n.getFirstChild() != null && n.getFirstChild().getNextSibling() == null; + } + + public static void appendSibling(AST n, AST s) { + while ( n.getNextSibling() != null ) { + n = n.getNextSibling(); + } + n.setNextSibling( s ); + } + + /** + * Inserts the child as the first child of the parent, all other children are shifted over to the 'right'. + * + * @param parent the parent + * @param child the new first child + */ + public static void insertChild(AST parent, AST child) { + if ( parent.getFirstChild() == null ) { + parent.setFirstChild( child ); + } + else { + AST n = parent.getFirstChild(); + parent.setFirstChild( child ); + child.setNextSibling( n ); + } + } + + /** + * Filters nodes out of a tree. + */ + public static interface FilterPredicate { + /** + * Returns true if the node should be filtered out. + * + * @param n The node. + * @return true if the node should be filtered out, false to keep the node. + */ + boolean exclude(AST n); + } + + /** + * A predicate that uses inclusion, rather than exclusion semantics. 
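+ * <p/>
+ * Example (illustrative only, not part of the original source) -
+ * collect every SELECT_CLAUSE node in a tree:
+ * <pre>
+ * List selects = ASTUtil.collectChildren( root, new ASTUtil.IncludePredicate() {
+ *     public boolean include(AST node) {
+ *         return node.getType() == SqlTokenTypes.SELECT_CLAUSE;
+ *     }
+ * } );
+ * </pre>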
+ */ + public abstract static class IncludePredicate implements FilterPredicate { + public final boolean exclude(AST node) { + return !include( node ); + } + + public abstract boolean include(AST node); + } + + private static ASTArray createAstArray(ASTFactory factory, int size, int parentType, String parentText, AST child1) { + ASTArray array = new ASTArray( size ); + array.add( factory.create( parentType, parentText ) ); + array.add( child1 ); + return array; + } + + public static List collectChildren(AST root, FilterPredicate predicate) { +// List children = new ArrayList(); +// collectChildren( children, root, predicate ); +// return children; + return new CollectingNodeVisitor( predicate ).collect( root ); + } + + private static class CollectingNodeVisitor implements NodeTraverser.VisitationStrategy { + private final FilterPredicate predicate; + private final List collectedNodes = new ArrayList(); + + public CollectingNodeVisitor(FilterPredicate predicate) { + this.predicate = predicate; + } + + public void visit(AST node) { + if ( predicate == null || !predicate.exclude( node ) ) { + collectedNodes.add( node ); + } + } + + public List getCollectedNodes() { + return collectedNodes; + } + + public List collect(AST root) { + NodeTraverser traverser = new NodeTraverser( this ); + traverser.traverseDepthFirst( root ); + return collectedNodes; + } + } + + private static void collectChildren(List children, AST root, FilterPredicate predicate) { + for ( AST n = root.getFirstChild(); n != null; n = n.getNextSibling() ) { + if ( predicate == null || !predicate.exclude( n ) ) { + children.add( n ); + } + collectChildren( children, n, predicate ); + } + } + +} diff --git a/src/org/hibernate/hql/ast/util/AliasGenerator.java b/src/org/hibernate/hql/ast/util/AliasGenerator.java new file mode 100644 index 0000000000..9b147a0464 --- /dev/null +++ b/src/org/hibernate/hql/ast/util/AliasGenerator.java @@ -0,0 +1,22 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import org.hibernate.util.StringHelper; + +/** + * Generates class/table/column aliases during semantic analysis and SQL rendering. + *
+ * Its essential purpose is to keep an internal counter to ensure that the
+ * generated aliases are unique.
+ */
+public class AliasGenerator {
+	private int next = 0;
+
+	private int nextCount() {
+		return next++;
+	}
+
+	public String createName(String name) {
+		return StringHelper.generateAlias( name, nextCount() );
+	}
+}
diff --git a/src/org/hibernate/hql/ast/util/ColumnHelper.java b/src/org/hibernate/hql/ast/util/ColumnHelper.java
new file mode 100644
index 0000000000..1438c2a5d7
--- /dev/null
+++ b/src/org/hibernate/hql/ast/util/ColumnHelper.java
@@ -0,0 +1,49 @@
+// $Id$
+package org.hibernate.hql.ast.util;
+
+import org.hibernate.hql.NameGenerator;
+import org.hibernate.hql.antlr.SqlTokenTypes;
+import org.hibernate.hql.ast.tree.HqlSqlWalkerNode;
+
+import antlr.ASTFactory;
+import antlr.collections.AST;
+
+/**
+ * Provides utility methods for dealing with arrays of SQL column names.
+ *
+ * @author josh Jan 3, 2005 9:08:47 AM
+ */
+public final class ColumnHelper {
+
+	/**
+	 * @deprecated (tell clover to filter this out)
+	 */
+	private ColumnHelper() {
+	}
+
+	public static void generateSingleScalarColumn(HqlSqlWalkerNode node, int i) {
+		ASTFactory factory = node.getASTFactory();
+		ASTUtil.createSibling( factory, SqlTokenTypes.SELECT_COLUMNS, " as " + NameGenerator.scalarName( i, 0 ), node );
+	}
+
+	/**
+	 * Generates the scalar column AST nodes for a given array of SQL columns.
+	 */
+	public static void generateScalarColumns(HqlSqlWalkerNode node, String[] sqlColumns, int i) {
+		if ( sqlColumns.length == 1 ) {
+			generateSingleScalarColumn( node, i );
+		}
+		else {
+			ASTFactory factory = node.getASTFactory();
+			AST n = node;
+			n.setText( sqlColumns[0] );	// Use the DOT node to emit the first column name.
+			// Create the column names, followed by the column aliases.
+			for ( int j = 0; j < sqlColumns.length; j++ ) {
+				if ( j > 0 ) {
+					n = ASTUtil.createSibling( factory, SqlTokenTypes.SQL_TOKEN, sqlColumns[j], n );
+				}
+				n = ASTUtil.createSibling( factory, SqlTokenTypes.SELECT_COLUMNS, " as " + NameGenerator.scalarName( i, j ), n );
+			}
+		}
+	}
+}
diff --git a/src/org/hibernate/hql/ast/util/JoinProcessor.java b/src/org/hibernate/hql/ast/util/JoinProcessor.java
new file mode 100644
index 0000000000..78116856ac
--- /dev/null
+++ b/src/org/hibernate/hql/ast/util/JoinProcessor.java
@@ -0,0 +1,164 @@
+// $Id$
+package org.hibernate.hql.ast.util;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.ListIterator;
+import java.util.Collections;
+import java.util.List;
+
+import org.hibernate.AssertionFailure;
+import org.hibernate.engine.JoinSequence;
+import org.hibernate.hql.antlr.SqlTokenTypes;
+import org.hibernate.hql.ast.QueryTranslatorImpl;
+import org.hibernate.hql.ast.tree.FromClause;
+import org.hibernate.hql.ast.tree.FromElement;
+import org.hibernate.hql.ast.tree.QueryNode;
+import org.hibernate.hql.ast.tree.DotNode;
+import org.hibernate.sql.JoinFragment;
+import org.hibernate.util.StringHelper;
+
+import antlr.ASTFactory;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Performs the post-processing of the join information gathered during semantic analysis.
+ * The join-generating classes are complex; this class encapsulates some of the
+ * JoinSequence-related code.
+ * + * @author Joshua Davis + */ +public class JoinProcessor implements SqlTokenTypes { + + private static final Log log = LogFactory.getLog( JoinProcessor.class ); + + private QueryTranslatorImpl queryTranslatorImpl; + private SyntheticAndFactory andFactory; + + /** + * Constructs a new JoinProcessor. + * + * @param astFactory The factory for AST node creation. + * @param queryTranslatorImpl The query translator. + */ + public JoinProcessor(ASTFactory astFactory, QueryTranslatorImpl queryTranslatorImpl) { + this.andFactory = new SyntheticAndFactory( astFactory ); + this.queryTranslatorImpl = queryTranslatorImpl; + } + + /** + * Translates an AST join type (i.e., the token type) into a JoinFragment.XXX join type. + * + * @param astJoinType The AST join type (from HqlSqlTokenTypes or SqlTokenTypes) + * @return a JoinFragment.XXX join type. + * @see JoinFragment + * @see SqlTokenTypes + */ + public static int toHibernateJoinType(int astJoinType) { + switch ( astJoinType ) { + case LEFT_OUTER: + return JoinFragment.LEFT_OUTER_JOIN; + case INNER: + return JoinFragment.INNER_JOIN; + case RIGHT_OUTER: + return JoinFragment.RIGHT_OUTER_JOIN; + default: + throw new AssertionFailure( "undefined join type " + astJoinType ); + } + } + + public void processJoins(QueryNode query, boolean inSubquery) { + final FromClause fromClause = query.getFromClause(); + + final List fromElements; + if ( DotNode.useThetaStyleImplicitJoins ) { + // for regression testing against output from the old parser... + // found it easiest to simply reorder the FromElements here into ascending order + // in terms of injecting them into the resulting sql ast in orders relative to those + // expected by the old parser; this is definitely another of those "only needed + // for regression purposes". The SyntheticAndFactory, then, simply injects them as it + // encounters them. + fromElements = new ArrayList(); + ListIterator liter = fromClause.getFromElements().listIterator( fromClause.getFromElements().size() ); + while ( liter.hasPrevious() ) { + fromElements.add( liter.previous() ); + } + } + else { + fromElements = fromClause.getFromElements(); + } + + // Iterate through the alias,JoinSequence pairs and generate SQL token nodes. + Iterator iter = fromElements.iterator(); + while ( iter.hasNext() ) { + final FromElement fromElement = ( FromElement ) iter.next(); + JoinSequence join = fromElement.getJoinSequence(); + join.setSelector( + new JoinSequence.Selector() { + public boolean includeSubclasses(String alias) { + // The uber-rule here is that we need to include subclass joins if + // the FromElement is in any way dereferenced by a property from + // the subclass table; otherwise we end up with column references + // qualified by a non-existent table reference in the resulting SQL... + boolean containsTableAlias = fromClause.containsTableAlias( alias ); + if ( fromElement.isDereferencedBySubclassProperty() ) { + // TODO : or should we return 'containsTableAlias'?? 
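+							// Illustrative (hypothetical) case: in "from Animal a where a.pounces > 0",
+							// if 'pounces' is mapped only on a subclass table, that subclass join must be
+							// kept so the rendered SQL can qualify the column with a valid table alias.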
+ log.trace( "forcing inclusion of extra joins [alias=" + alias + ", containsTableAlias=" + containsTableAlias + "]" ); + return true; + } + boolean shallowQuery = queryTranslatorImpl.isShallowQuery(); + boolean includeSubclasses = fromElement.isIncludeSubclasses(); + boolean subQuery = fromClause.isSubQuery(); + return includeSubclasses && containsTableAlias && !subQuery && !shallowQuery; + } + } + ); + addJoinNodes( query, join, fromElement, inSubquery ); + } + + } + + private void addJoinNodes(QueryNode query, JoinSequence join, FromElement fromElement, boolean inSubquery) { + // Generate FROM and WHERE fragments for the from element. + JoinFragment joinFragment = join.toJoinFragment( + inSubquery ? Collections.EMPTY_MAP : queryTranslatorImpl.getEnabledFilters(), + fromElement.useFromFragment() || fromElement.isDereferencedBySuperclassOrSubclassProperty(), + fromElement.getWithClauseFragment(), + fromElement.getWithClauseJoinAlias() + ); + + String frag = joinFragment.toFromFragmentString(); + String whereFrag = joinFragment.toWhereFragmentString(); + + // If the from element represents a JOIN_FRAGMENT and it is + // a theta-style join, convert its type from JOIN_FRAGMENT + // to FROM_FRAGMENT + if ( fromElement.getType() == JOIN_FRAGMENT && + ( join.isThetaStyle() || StringHelper.isNotEmpty( whereFrag ) ) ) { + fromElement.setType( FROM_FRAGMENT ); + fromElement.getJoinSequence().setUseThetaStyle( true ); // this is used during SqlGenerator processing + } + + // If there is a FROM fragment and the FROM element is an explicit, then add the from part. + if ( fromElement.useFromFragment() /*&& StringHelper.isNotEmpty( frag )*/ ) { + String fromFragment = processFromFragment( frag, join ); + if ( log.isDebugEnabled() ) { + log.debug( "Using FROM fragment [" + fromFragment + "]" ); + } + fromElement.setText( fromFragment.trim() ); // Set the text of the fromElement. + } + andFactory.addWhereFragment( joinFragment, whereFrag, query, fromElement ); + } + + private String processFromFragment(String frag, JoinSequence join) { + String fromFragment = frag.trim(); + // The FROM fragment will probably begin with ', '. Remove this if it is present. 
+ if ( fromFragment.startsWith( ", " ) ) { + fromFragment = fromFragment.substring( 2 ); + } + return fromFragment; + } + +} diff --git a/src/org/hibernate/hql/ast/util/LiteralProcessor.java b/src/org/hibernate/hql/ast/util/LiteralProcessor.java new file mode 100644 index 0000000000..510b556666 --- /dev/null +++ b/src/org/hibernate/hql/ast/util/LiteralProcessor.java @@ -0,0 +1,297 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.HibernateException; +import org.hibernate.dialect.Dialect; +import org.hibernate.hql.QueryTranslator; +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.antlr.SqlTokenTypes; +import org.hibernate.hql.ast.HqlSqlWalker; +import org.hibernate.hql.ast.InvalidPathException; +import org.hibernate.hql.ast.tree.DotNode; +import org.hibernate.hql.ast.tree.FromClause; +import org.hibernate.hql.ast.tree.IdentNode; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.InFragment; +import org.hibernate.type.LiteralType; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.ReflectHelper; + +import antlr.SemanticException; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.math.BigDecimal; +import java.text.DecimalFormat; + +/** + * A delegate that handles literals and constants for HqlSqlWalker, performing the token replacement functions and + * classifying literals. + * + * @author josh Sep 2, 2004 7:15:30 AM + */ +public class LiteralProcessor implements HqlSqlTokenTypes { + /** + * Indicates that Float and Double literal values should + * be treated using the SQL "exact" format (i.e., '.001') + */ + public static final int EXACT = 0; + /** + * Indicates that Float and Double literal values should + * be treated using the SQL "approximate" format (i.e., '1E-3') + */ + public static final int APPROXIMATE = 1; + /** + * In what format should Float and Double literal values be sent + * to the database? + * @see #EXACT, #APPROXIMATE + */ + public static int DECIMAL_LITERAL_FORMAT = EXACT; + + private static final Log log = LogFactory.getLog( LiteralProcessor.class ); + + private HqlSqlWalker walker; + + public LiteralProcessor(HqlSqlWalker hqlSqlWalker) { + this.walker = hqlSqlWalker; + } + + public boolean isAlias(String alias) { + FromClause from = walker.getCurrentFromClause(); + while ( from.isSubQuery() ) { + if ( from.containsClassAlias(alias) ) { + return true; + } + from = from.getParentFromClause(); + } + return from.containsClassAlias(alias); + } + + public void processConstant(AST constant, boolean resolveIdent) throws SemanticException { + // If the constant is an IDENT, figure out what it means... + boolean isIdent = ( constant.getType() == IDENT || constant.getType() == WEIRD_IDENT ); + if ( resolveIdent && isIdent && isAlias( constant.getText() ) ) { // IDENT is a class alias in the FROM. + IdentNode ident = ( IdentNode ) constant; + // Resolve to an identity column. + ident.resolve(false, true); + } + else { // IDENT might be the name of a class. + Queryable queryable = walker.getSessionFactoryHelper().findQueryableUsingImports( constant.getText() ); + if ( isIdent && queryable != null ) { + constant.setText( queryable.getDiscriminatorSQLValue() ); + } + // Otherwise, it's a literal. 
+ else { + processLiteral( constant ); + } + } + } + + public void lookupConstant(DotNode node) throws SemanticException { + String text = ASTUtil.getPathText( node ); + Queryable persister = walker.getSessionFactoryHelper().findQueryableUsingImports( text ); + if ( persister != null ) { + // the name of an entity class + final String discrim = persister.getDiscriminatorSQLValue(); + node.setDataType( persister.getDiscriminatorType() ); + if ( InFragment.NULL.equals(discrim) || InFragment.NOT_NULL.equals(discrim) ) { + throw new InvalidPathException( "subclass test not allowed for null or not null discriminator: '" + text + "'" ); + } + else { + setSQLValue( node, text, discrim ); //the class discriminator value + } + } + else { + Object value = ReflectHelper.getConstantValue( text ); + if ( value == null ) { + throw new InvalidPathException( "Invalid path: '" + text + "'" ); + } + else { + setConstantValue( node, text, value ); + } + } + } + + private void setSQLValue(DotNode node, String text, String value) { + if ( log.isDebugEnabled() ) { + log.debug( "setSQLValue() " + text + " -> " + value ); + } + node.setFirstChild( null ); // Chop off the rest of the tree. + node.setType( SqlTokenTypes.SQL_TOKEN ); + node.setText(value); + node.setResolvedConstant( text ); + } + + private void setConstantValue(DotNode node, String text, Object value) { + if ( log.isDebugEnabled() ) { + log.debug( "setConstantValue() " + text + " -> " + value + " " + value.getClass().getName() ); + } + node.setFirstChild( null ); // Chop off the rest of the tree. + if ( value instanceof String ) { + node.setType( SqlTokenTypes.QUOTED_STRING ); + } + else if ( value instanceof Character ) { + node.setType( SqlTokenTypes.QUOTED_STRING ); + } + else if ( value instanceof Byte ) { + node.setType( SqlTokenTypes.NUM_INT ); + } + else if ( value instanceof Short ) { + node.setType( SqlTokenTypes.NUM_INT ); + } + else if ( value instanceof Integer ) { + node.setType( SqlTokenTypes.NUM_INT ); + } + else if ( value instanceof Long ) { + node.setType( SqlTokenTypes.NUM_LONG ); + } + else if ( value instanceof Double ) { + node.setType( SqlTokenTypes.NUM_DOUBLE ); + } + else if ( value instanceof Float ) { + node.setType( SqlTokenTypes.NUM_FLOAT ); + } + else { + node.setType( SqlTokenTypes.CONSTANT ); + } + Type type; + try { + type = TypeFactory.heuristicType( value.getClass().getName() ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + if ( type == null ) { + throw new QueryException( QueryTranslator.ERROR_CANNOT_DETERMINE_TYPE + node.getText() ); + } + try { + LiteralType literalType = ( LiteralType ) type; + Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect(); + node.setText( literalType.objectToSQLString( value, dialect ) ); + } + catch ( Exception e ) { + throw new QueryException( QueryTranslator.ERROR_CANNOT_FORMAT_LITERAL + node.getText(), e ); + } + node.setDataType( type ); + node.setResolvedConstant( text ); + } + + public void processBoolean(AST constant) { + // TODO: something much better - look at the type of the other expression! + // TODO: Have comparisonExpression and/or arithmeticExpression rules complete the resolution of boolean nodes. 
+ String replacement = ( String ) walker.getTokenReplacements().get( constant.getText() ); + if ( replacement != null ) { + constant.setText( replacement ); + } + else { + boolean bool = "true".equals( constant.getText().toLowerCase() ); + Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect(); + constant.setText( dialect.toBooleanValueString(bool) ); + } + } + + private void processLiteral(AST constant) { + String replacement = ( String ) walker.getTokenReplacements().get( constant.getText() ); + if ( replacement != null ) { + if ( log.isDebugEnabled() ) { + log.debug( "processConstant() : Replacing '" + constant.getText() + "' with '" + replacement + "'" ); + } + constant.setText( replacement ); + } + } + + public void processNumeric(AST literal) { + if ( literal.getType() == NUM_INT || literal.getType() == NUM_LONG ) { + literal.setText( determineIntegerRepresentation( literal.getText(), literal.getType() ) ); + } + else if ( literal.getType() == NUM_FLOAT || literal.getType() == NUM_DOUBLE ) { + literal.setText( determineDecimalRepresentation( literal.getText(), literal.getType() ) ); + } + else { + log.warn( "Unexpected literal token type [" + literal.getType() + "] passed for numeric processing" ); + } + } + + private String determineIntegerRepresentation(String text, int type) { + try { + if ( type == NUM_INT ) { + try { + return Integer.valueOf( text ).toString(); + } + catch( NumberFormatException e ) { + log.trace( "could not format incoming text [" + text + "] as a NUM_INT; assuming numeric overflow and attempting as NUM_LONG" ); + } + } + String literalValue = text; + if ( literalValue.endsWith( "l" ) || literalValue.endsWith( "L" ) ) { + literalValue = literalValue.substring( 0, literalValue.length() - 1 ); + } + return Long.valueOf( literalValue ).toString(); + } + catch( Throwable t ) { + throw new HibernateException( "Could not parse literal [" + text + "] as integer", t ); + } + } + + public String determineDecimalRepresentation(String text, int type) { + String literalValue = text; + if ( type == NUM_FLOAT ) { + if ( literalValue.endsWith( "f" ) || literalValue.endsWith( "F" ) ) { + literalValue = literalValue.substring( 0, literalValue.length() - 1 ); + } + } + else if ( type == NUM_DOUBLE ) { + if ( literalValue.endsWith( "d" ) || literalValue.endsWith( "D" ) ) { + literalValue = literalValue.substring( 0, literalValue.length() - 1 ); + } + } + + BigDecimal number = null; + try { + number = new BigDecimal( literalValue ); + } + catch( Throwable t ) { + throw new HibernateException( "Could not parse literal [" + text + "] as big-decimal", t ); + } + + return formatters[ DECIMAL_LITERAL_FORMAT ].format( number ); + } + + private static interface DecimalFormatter { + String format(BigDecimal number); + } + + private static class ExactDecimalFormatter implements DecimalFormatter { + public String format(BigDecimal number) { + return number.toString(); + } + } + + private static class ApproximateDecimalFormatter implements DecimalFormatter { + private static final String FORMAT_STRING = "#0.0E0"; + public String format(BigDecimal number) { + try { + // TODO : what amount of significant digits need to be supported here? 
+ // - from the DecimalFormat docs: + // [significant digits] = [minimum integer digits] + [maximum fraction digits] + DecimalFormat jdkFormatter = new DecimalFormat( FORMAT_STRING ); + jdkFormatter.setMinimumIntegerDigits( 1 ); + jdkFormatter.setMaximumFractionDigits( Integer.MAX_VALUE ); + return jdkFormatter.format( number ); + } + catch( Throwable t ) { + throw new HibernateException( "Unable to format decimal literal in approximate format [" + number.toString() + "]", t ); + } + } + } + + private static final DecimalFormatter[] formatters = new DecimalFormatter[] { + new ExactDecimalFormatter(), + new ApproximateDecimalFormatter() + }; +} \ No newline at end of file diff --git a/src/org/hibernate/hql/ast/util/NodeTraverser.java b/src/org/hibernate/hql/ast/util/NodeTraverser.java new file mode 100644 index 0000000000..ea2096bce8 --- /dev/null +++ b/src/org/hibernate/hql/ast/util/NodeTraverser.java @@ -0,0 +1,44 @@ +package org.hibernate.hql.ast.util; + +import antlr.collections.AST; + +/** + * A visitor for traversing an AST tree. + * + * @author Steve Ebersole + */ +public class NodeTraverser { + public static interface VisitationStrategy { + public void visit(AST node); + } + + private final VisitationStrategy strategy; + + public NodeTraverser(VisitationStrategy strategy) { + this.strategy = strategy; + } + + /** + * Traverse the AST tree depth first. + *
    + * Note that the AST passed in is not visited itself. Visitation starts + * with its children. + * + * @param ast + */ + public void traverseDepthFirst(AST ast) { + if ( ast == null ) { + throw new IllegalArgumentException( "node to traverse cannot be null!" ); + } + visitDepthFirst( ast.getFirstChild() ); + } + + private void visitDepthFirst(AST ast) { + if ( ast == null ) { + return; + } + strategy.visit( ast ); + visitDepthFirst( ast.getFirstChild() ); + visitDepthFirst( ast.getNextSibling() ); + } +} diff --git a/src/org/hibernate/hql/ast/util/PathHelper.java b/src/org/hibernate/hql/ast/util/PathHelper.java new file mode 100644 index 0000000000..c486fbb1c3 --- /dev/null +++ b/src/org/hibernate/hql/ast/util/PathHelper.java @@ -0,0 +1,54 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.util.StringHelper; + +import antlr.ASTFactory; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Provides utility methods for paths. + * + * @author josh Sep 14, 2004 8:16:29 AM + */ +public final class PathHelper { + + private static final Log log = LogFactory.getLog( PathHelper.class ); + + private PathHelper() { + } + + /** + * Turns a path into an AST. + * + * @param path The path. + * @param factory The AST factory to use. + * @return An HQL AST representing the path. + */ + public static AST parsePath(String path, ASTFactory factory) { + String[] identifiers = StringHelper.split( ".", path ); + AST lhs = null; + for ( int i = 0; i < identifiers.length; i++ ) { + String identifier = identifiers[i]; + AST child = ASTUtil.create( factory, HqlSqlTokenTypes.IDENT, identifier ); + if ( i == 0 ) { + lhs = child; + } + else { + lhs = ASTUtil.createBinarySubtree( factory, HqlSqlTokenTypes.DOT, ".", lhs, child ); + } + } + if ( log.isDebugEnabled() ) { + log.debug( "parsePath() : " + path + " -> " + ASTUtil.getDebugString( lhs ) ); + } + return lhs; + } + + public static String getAlias(String path) { + return StringHelper.root( path ); + } +} diff --git a/src/org/hibernate/hql/ast/util/SessionFactoryHelper.java b/src/org/hibernate/hql/ast/util/SessionFactoryHelper.java new file mode 100644 index 0000000000..a3ac3a9ea4 --- /dev/null +++ b/src/org/hibernate/hql/ast/util/SessionFactoryHelper.java @@ -0,0 +1,392 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import java.util.HashMap; +import java.util.Map; + +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.engine.JoinSequence; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.hql.NameGenerator; +import org.hibernate.hql.ast.DetailedSemanticException; +import org.hibernate.hql.ast.QuerySyntaxException; +import org.hibernate.hql.ast.tree.SqlNode; +import org.hibernate.persister.collection.CollectionPropertyMapping; +import org.hibernate.persister.collection.CollectionPropertyNames; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.PropertyMapping; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.type.AssociationType; +import org.hibernate.type.CollectionType; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; + +import antlr.SemanticException; +import antlr.collections.AST; + +/** + * 
Helper for performing common and/or complex operations with the
+ * {@link SessionFactoryImplementor} during translation of an HQL query.
+ *
+ * @author Joshua Davis
+ */
+public class SessionFactoryHelper {
+
+	private SessionFactoryImplementor sfi;
+	private Map collectionPropertyMappingByRole;
+
+	/**
+	 * Construct a new SessionFactoryHelper instance.
+	 *
+	 * @param sfi The SessionFactory impl to be encapsulated.
+	 */
+	public SessionFactoryHelper(SessionFactoryImplementor sfi) {
+		this.sfi = sfi;
+		collectionPropertyMappingByRole = new HashMap();
+	}
+
+	/**
+	 * Get a handle to the encapsulated SessionFactory.
+	 *
+	 * @return The encapsulated SessionFactory.
+	 */
+	public SessionFactoryImplementor getFactory() {
+		return sfi;
+	}
+
+	/**
+	 * Does the given persister define a physical discriminator column
+	 * for the purpose of inheritance discrimination?
+	 *
+	 * @param persister The persister to be checked.
+	 * @return True if the persister does define an actual discriminator column.
+	 */
+	public boolean hasPhysicalDiscriminatorColumn(Queryable persister) {
+		if ( persister.getDiscriminatorType() != null ) {
+			String discrimColumnName = persister.getDiscriminatorColumnName();
+			// Needed the "clazz_" check to work around union-subclasses
+			// TODO : is there a way to tell whether a persister is truly discrim-column based inheritance?
+			if ( discrimColumnName != null && !"clazz_".equals( discrimColumnName ) ) {
+				return true;
+			}
+		}
+		return false;
+	}
+
+	/**
+	 * Given a (potentially unqualified) class name, locate its imported qualified name.
+	 *
+	 * @param className The potentially unqualified class name
+	 * @return The qualified class name.
+	 */
+	public String getImportedClassName(String className) {
+		return sfi.getImportedClassName( className );
+	}
+
+	/**
+	 * Given a (potentially unqualified) class name, locate its persister.
+	 *
+	 * @param className The (potentially unqualified) class name.
+	 * @return The defined persister for this class, or null if none found.
	 */
+	public Queryable findQueryableUsingImports(String className) {
+		return findQueryableUsingImports( sfi, className );
+	}
+
+
+	/**
+	 * Given a (potentially unqualified) class name, locate its persister.
+	 *
+	 * @param sfi       The session factory implementor.
+	 * @param className The (potentially unqualified) class name.
+	 * @return The defined persister for this class, or null if none found.
+	 */
+	public static Queryable findQueryableUsingImports(SessionFactoryImplementor sfi, String className) {
+		final String importedClassName = sfi.getImportedClassName( className );
+		if ( importedClassName == null ) {
+			return null;
+		}
+		try {
+			return ( Queryable ) sfi.getEntityPersister( importedClassName );
+		}
+		catch ( MappingException me ) {
+			return null;
+		}
+	}
+
+	/**
+	 * Locate the persister by class or entity name.
+	 *
+	 * @param name The class or entity name
+	 * @return The defined persister for this entity, or null if none found.
+	 * @throws MappingException
+	 */
+	private EntityPersister findEntityPersisterByName(String name) throws MappingException {
+		// First, try to get the persister using the given name directly.
+		try {
+			return sfi.getEntityPersister( name );
+		}
+		catch ( MappingException ignore ) {
+			// unable to locate it using this name
+		}
+
+		// If that didn't work, try using the 'import' name.
+		String importedClassName = sfi.getImportedClassName( name );
+		if ( importedClassName == null ) {
+			return null;
+		}
+		return sfi.getEntityPersister( importedClassName );
+	}
+
+	/**
+	 * Locate the persister by class or entity name, requiring that such a persister
+	 * exist.
+	 *
+	 * @param name The class or entity name
+	 * @return The defined persister for this entity
+	 * @throws SemanticException Indicates the persister could not be found
+	 */
+	public EntityPersister requireClassPersister(String name) throws SemanticException {
+		EntityPersister cp;
+		try {
+			cp = findEntityPersisterByName( name );
+			if ( cp == null ) {
+				throw new QuerySyntaxException( name + " is not mapped" );
+			}
+		}
+		catch ( MappingException e ) {
+			throw new DetailedSemanticException( e.getMessage(), e );
+		}
+		return cp;
+	}
+
+	/**
+	 * Locate the collection persister by the collection role.
+	 *
+	 * @param role The collection role name.
+	 * @return The defined CollectionPersister for this collection role, or null.
+	 */
+	public QueryableCollection getCollectionPersister(String role) {
+		try {
+			return ( QueryableCollection ) sfi.getCollectionPersister( role );
+		}
+		catch ( ClassCastException cce ) {
+			throw new QueryException( "collection is not queryable: " + role );
+		}
+		catch ( Exception e ) {
+			throw new QueryException( "collection not found: " + role );
+		}
+	}
+
+	/**
+	 * Locate the collection persister by the collection role, requiring that
+	 * such a persister exist.
+	 *
+	 * @param role The collection role name.
+	 * @return The defined CollectionPersister for this collection role.
+	 * @throws QueryException Indicates that the collection persister could not be found.
+	 */
+	public QueryableCollection requireQueryableCollection(String role) throws QueryException {
+		try {
+			QueryableCollection queryableCollection = ( QueryableCollection ) sfi.getCollectionPersister( role );
+			if ( queryableCollection != null ) {
+				collectionPropertyMappingByRole.put( role, new CollectionPropertyMapping( queryableCollection ) );
+			}
+			return queryableCollection;
+		}
+		catch ( ClassCastException cce ) {
+			throw new QueryException( "collection role is not queryable: " + role );
+		}
+		catch ( Exception e ) {
+			throw new QueryException( "collection role not found: " + role );
+		}
+	}
+
+	/**
+	 * Retrieve a PropertyMapping describing the given collection role.
+	 *
+	 * @param role The collection role for which to retrieve the property mapping.
+	 * @return The property mapping.
+	 */
+	private PropertyMapping getCollectionPropertyMapping(String role) {
+		return ( PropertyMapping ) collectionPropertyMappingByRole.get( role );
+	}
+
+	/**
+	 * Retrieves the column names corresponding to the collection elements for the given
+	 * collection role.
+	 *
+	 * @param role      The collection role
+	 * @param roleAlias The sql column-qualification alias (i.e., the table alias)
+	 * @return the collection element columns
+	 */
+	public String[] getCollectionElementColumns(String role, String roleAlias) {
+		return getCollectionPropertyMapping( role ).toColumns( roleAlias, CollectionPropertyNames.COLLECTION_ELEMENTS );
+	}
+
+	/**
+	 * Generate an empty join sequence instance.
+	 *
+	 * @return The generated join sequence.
+	 */
+	public JoinSequence createJoinSequence() {
+		return new JoinSequence( sfi );
+	}
+
+	/**
+	 * Generate a join sequence representing the given association type.
+	 *
+	 * @param implicit        Should implicit joins (theta-style) or explicit joins (ANSI-style) be rendered
+	 * @param associationType The type representing the thing to be joined into.
+	 * @param tableAlias      The table alias to use in qualifying the join conditions
+	 * @param joinType        The type of join to render (inner, outer, etc); see {@link org.hibernate.sql.JoinFragment}
+	 * @param columns         The columns making up the condition of the join.
+	 * @return The generated join sequence.
+	 */
+	public JoinSequence createJoinSequence(boolean implicit, AssociationType associationType, String tableAlias, int joinType, String[] columns) {
+		JoinSequence joinSequence = createJoinSequence();
+		joinSequence.setUseThetaStyle( implicit );	// Implicit joins use theta style (WHERE pk = fk), explicit joins use JOIN (after from)
+		joinSequence.addJoin( associationType, tableAlias, joinType, columns );
+		return joinSequence;
+	}
+
+	/**
+	 * Create a join sequence rooted at the given collection.
+	 *
+	 * @param collPersister  The persister for the collection at which the join should be rooted.
+	 * @param collectionName The alias to use for qualifying column references.
+	 * @return The generated join sequence.
+	 */
+	public JoinSequence createCollectionJoinSequence(QueryableCollection collPersister, String collectionName) {
+		JoinSequence joinSequence = createJoinSequence();
+		joinSequence.setRoot( collPersister, collectionName );
+		joinSequence.setUseThetaStyle( true );		// TODO: figure out how this should be set.
+///////////////////////////////////////////////////////////////////////////////
+// This was the reason for failures regarding INDEX_OP and subclass joins on
+// theta-join dialects; not sure what behaviour we were trying to emulate ;)
+//		joinSequence = joinSequence.getFromPart();	// Emulate the old addFromOnly behavior.
+		return joinSequence;
+	}
+
+	/**
+	 * Determine the name of the property for the entity encapsulated by the
+	 * given type which represents the id or unique-key.
+	 *
+	 * @param entityType The type representing the entity.
+	 * @return The corresponding property name
+	 * @throws QueryException Indicates such a property could not be found.
+	 */
+	public String getIdentifierOrUniqueKeyPropertyName(EntityType entityType) {
+		try {
+			return entityType.getIdentifierOrUniqueKeyPropertyName( sfi );
+		}
+		catch ( MappingException me ) {
+			throw new QueryException( me );
+		}
+	}
+
+	/**
+	 * Retrieve the number of columns represented by this type.
+	 *
+	 * @param type The type.
+	 * @return The number of columns.
+	 */
+	public int getColumnSpan(Type type) {
+		return type.getColumnSpan( sfi );
+	}
+
+	/**
+	 * Given a collection type, determine the entity name of the elements
+	 * contained within instances of that collection.
+	 *
+	 * @param collectionType The collection type to check.
+	 * @return The entity name of the elements of this collection.
+	 */
+	public String getAssociatedEntityName(CollectionType collectionType) {
+		return collectionType.getAssociatedEntityName( sfi );
+	}
+
+	/**
+	 * Given a collection type, determine the Type representing elements
+	 * within instances of that collection.
+	 *
+	 * @param collectionType The collection type to be checked.
+	 * @return The Type of the elements of the collection.
+	 */
+	private Type getElementType(CollectionType collectionType) {
+		return collectionType.getElementType( sfi );
+	}
+
+	/**
+	 * Essentially the same as {@link #getElementType}, but requiring that the
+	 * element type be an association type.
+ * + * @param collectionType The collection type to be checked. + * @return The AssociationType of the elements of the collection. + */ + public AssociationType getElementAssociationType(CollectionType collectionType) { + return ( AssociationType ) getElementType( collectionType ); + } + + /** + * Locate a registered sql function by name. + * + * @param functionName The name of the function to locate + * @return The sql function, or null if not found. + */ + public SQLFunction findSQLFunction(String functionName) { + return sfi.getSqlFunctionRegistry().findSQLFunction( functionName.toLowerCase() ); + } + + /** + * Locate a registered sql function by name, requiring that such a registered function exist. + * + * @param functionName The name of the function to locate + * @return The sql function. + * @throws QueryException Indicates no matching sql functions could be found. + */ + private SQLFunction requireSQLFunction(String functionName) { + SQLFunction f = findSQLFunction( functionName ); + if ( f == null ) { + throw new QueryException( "Unable to find SQL function: " + functionName ); + } + return f; + } + + /** + * Find the function return type given the function name and the first argument expression node. + * + * @param functionName The function name. + * @param first The first argument expression. + * @return the function return type given the function name and the first argument expression node. + */ + public Type findFunctionReturnType(String functionName, AST first) { + // locate the registered function by the given name + SQLFunction sqlFunction = requireSQLFunction( functionName ); + + // determine the type of the first argument... + Type argumentType = null; + if ( first != null ) { + if ( "cast".equals(functionName) ) { + argumentType = TypeFactory.heuristicType( first.getNextSibling().getText() ); + } + else if ( first instanceof SqlNode ) { + argumentType = ( (SqlNode) first ).getDataType(); + } + } + + return sqlFunction.getReturnType( argumentType, sfi ); + } + + public String[][] generateColumnNames(Type[] sqlResultTypes) { + return NameGenerator.generateColumnNames( sqlResultTypes, sfi ); + } + + public boolean isStrictJPAQLComplianceEnabled() { + return sfi.getSettings().isStrictJPAQLCompliance(); + } +} diff --git a/src/org/hibernate/hql/ast/util/SyntheticAndFactory.java b/src/org/hibernate/hql/ast/util/SyntheticAndFactory.java new file mode 100644 index 0000000000..11eb7db735 --- /dev/null +++ b/src/org/hibernate/hql/ast/util/SyntheticAndFactory.java @@ -0,0 +1,134 @@ +// $Id$ +package org.hibernate.hql.ast.util; + +import java.util.Map; +import java.util.StringTokenizer; + +import org.hibernate.hql.antlr.HqlSqlTokenTypes; +import org.hibernate.hql.ast.tree.FromElement; +import org.hibernate.hql.ast.tree.QueryNode; +import org.hibernate.hql.ast.tree.RestrictableStatement; +import org.hibernate.hql.ast.tree.SqlFragment; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.JoinFragment; +import org.hibernate.util.StringHelper; + +import antlr.ASTFactory; +import antlr.collections.AST; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Creates synthetic and nodes based on the where fragment part of a JoinSequence. 
+ * + * @author josh Dec 5, 2004 12:25:20 PM + */ +public class SyntheticAndFactory implements HqlSqlTokenTypes { + private static final Log log = LogFactory.getLog( SyntheticAndFactory.class ); + + private ASTFactory astFactory; + private AST thetaJoins; + private AST filters; + + public SyntheticAndFactory(ASTFactory astFactory) { + this.astFactory = astFactory; + } + + public void addWhereFragment(JoinFragment joinFragment, String whereFragment, QueryNode query, FromElement fromElement) { + + if ( whereFragment == null ) { + return; + } + + whereFragment = whereFragment.trim(); + if ( StringHelper.isEmpty( whereFragment ) ) { + return; + } + else if ( !fromElement.useWhereFragment() && !joinFragment.hasThetaJoins() ) { + return; + } + + // Forcefully remove leading ands from where fragments; the grammar will + // handle adding them + if ( whereFragment.startsWith( "and" ) ) { + whereFragment = whereFragment.substring( 4 ); + } + + if ( log.isDebugEnabled() ) log.debug( "Using WHERE fragment [" + whereFragment + "]" ); + + SqlFragment fragment = ( SqlFragment ) ASTUtil.create( astFactory, SQL_TOKEN, whereFragment ); + fragment.setJoinFragment( joinFragment ); + fragment.setFromElement( fromElement ); + + // Filter conditions need to be inserted before the HQL where condition and the + // theta join node. This is because org.hibernate.loader.Loader binds the filter parameters first, + // then it binds all the HQL query parameters, see org.hibernate.loader.Loader.processFilterParameters(). + if ( fragment.getFromElement().isFilter() || fragment.hasFilterCondition() ) { + if ( filters == null ) { + // Find or create the WHERE clause + AST where = query.getWhereClause(); + // Create a new FILTERS node as a parent of all filters + filters = astFactory.create( FILTERS, "{filter conditions}" ); + // Put the FILTERS node before the HQL condition and theta joins + ASTUtil.insertChild( where, filters ); + } + + // add the current fragment to the FILTERS node + filters.addChild( fragment ); + } + else { + if ( thetaJoins == null ) { + // Find or create the WHERE clause + AST where = query.getWhereClause(); + // Create a new THETA_JOINS node as a parent of all filters + thetaJoins = astFactory.create( THETA_JOINS, "{theta joins}" ); + // Put the THETA_JOINS node before the HQL condition, after the filters. + if (filters==null) { + ASTUtil.insertChild( where, thetaJoins ); + } + else { + ASTUtil.insertSibling( thetaJoins, filters ); + } + } + + // add the current fragment to the THETA_JOINS node + thetaJoins.addChild(fragment); + } + + } + + public void addDiscriminatorWhereFragment(RestrictableStatement statement, Queryable persister, Map enabledFilters, String alias) { + String whereFragment = persister.filterFragment( alias, enabledFilters ).trim(); + if ( "".equals( whereFragment ) ) { + return; + } + if ( whereFragment.startsWith( "and" ) ) { + whereFragment = whereFragment.substring( 4 ); + } + + // Need to parse off the column qualifiers; this is assuming (which is true as of now) + // that this is only used from update and delete HQL statement parsing + whereFragment = StringHelper.replace( whereFragment, persister.generateFilterConditionAlias( alias ) + ".", "" ); + + // Note: this simply constructs a "raw" SQL_TOKEN representing the + // where fragment and injects this into the tree. This "works"; + // however it is probably not the best long-term solution. 
+		//
+		// At some point we probably want to apply an additional grammar to
+		// properly tokenize this where fragment into constituent parts
+		// focused on the operators embedded within the fragment.
+		AST discrimNode = astFactory.create( SQL_TOKEN, whereFragment );
+
+		if ( statement.getWhereClause().getNumberOfChildren() == 0 ) {
+			statement.getWhereClause().setFirstChild( discrimNode );
+		}
+		else {
+			AST and = astFactory.create( AND, "{and}" );
+			AST currentFirstChild = statement.getWhereClause().getFirstChild();
+			and.setFirstChild( discrimNode );
+			and.addChild( currentFirstChild );
+			statement.getWhereClause().setFirstChild( and );
+		}
+	}
+}
diff --git a/src/org/hibernate/hql/classic/ClassicQueryTranslatorFactory.java b/src/org/hibernate/hql/classic/ClassicQueryTranslatorFactory.java
new file mode 100755
index 0000000000..cdcfef6fc0
--- /dev/null
+++ b/src/org/hibernate/hql/classic/ClassicQueryTranslatorFactory.java
@@ -0,0 +1,41 @@
+//$Id$
+package org.hibernate.hql.classic;
+
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.hql.FilterTranslator;
+import org.hibernate.hql.QueryTranslator;
+import org.hibernate.hql.QueryTranslatorFactory;
+
+import java.util.Map;
+
+/**
+ * Generates translators which use the older hand-written parser to perform
+ * the translation.
+ *
+ * @author Gavin King
+ */
+public class ClassicQueryTranslatorFactory implements QueryTranslatorFactory {
+
+	/**
+	 * @see QueryTranslatorFactory#createQueryTranslator
+	 */
+	public QueryTranslator createQueryTranslator(
+			String queryIdentifier,
+			String queryString,
+			Map filters,
+			SessionFactoryImplementor factory) {
+		return new QueryTranslatorImpl( queryIdentifier, queryString, filters, factory );
+	}
+
+	/**
+	 * @see QueryTranslatorFactory#createFilterTranslator
+	 */
+	public FilterTranslator createFilterTranslator(
+			String queryIdentifier,
+			String queryString,
+			Map filters,
+			SessionFactoryImplementor factory) {
+		return new QueryTranslatorImpl( queryIdentifier, queryString, filters, factory );
+	}
+
+}
diff --git a/src/org/hibernate/hql/classic/ClauseParser.java b/src/org/hibernate/hql/classic/ClauseParser.java
new file mode 100644
index 0000000000..6463883da5
--- /dev/null
+++ b/src/org/hibernate/hql/classic/ClauseParser.java
@@ -0,0 +1,129 @@
+//$Id$
+package org.hibernate.hql.classic;
+
+import org.hibernate.QueryException;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Parses the Hibernate query into its constituent clauses.
+ */ +public class ClauseParser implements Parser { + + private Parser child; + private List selectTokens; + private boolean cacheSelectTokens = false; + private boolean byExpected = false; + private int parenCount = 0; + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + String lcToken = token.toLowerCase(); + + if ( "(".equals( token ) ) { + parenCount++; + } + else if ( ")".equals( token ) ) { + parenCount--; + } + + if ( byExpected && !lcToken.equals( "by" ) ) { + throw new QueryException( "BY expected after GROUP or ORDER: " + token ); + } + + boolean isClauseStart = parenCount == 0; //ignore subselect keywords + + if ( isClauseStart ) { + if ( lcToken.equals( "select" ) ) { + selectTokens = new ArrayList(); + cacheSelectTokens = true; + } + else if ( lcToken.equals( "from" ) ) { + child = new FromParser(); + child.start( q ); + cacheSelectTokens = false; + } + else if ( lcToken.equals( "where" ) ) { + endChild( q ); + child = new WhereParser(); + child.start( q ); + } + else if ( lcToken.equals( "order" ) ) { + endChild( q ); + child = new OrderByParser(); + byExpected = true; + } + else if ( lcToken.equals( "having" ) ) { + endChild( q ); + child = new HavingParser(); + child.start( q ); + } + else if ( lcToken.equals( "group" ) ) { + endChild( q ); + child = new GroupByParser(); + byExpected = true; + } + else if ( lcToken.equals( "by" ) ) { + if ( !byExpected ) throw new QueryException( "GROUP or ORDER expected before BY" ); + child.start( q ); + byExpected = false; + } + else { + isClauseStart = false; + } + } + + if ( !isClauseStart ) { + if ( cacheSelectTokens ) { + selectTokens.add( token ); + } + else { + if ( child == null ) { + throw new QueryException( "query must begin with SELECT or FROM: " + token ); + } + else { + child.token( token, q ); + } + } + } + + } + + private void endChild(QueryTranslatorImpl q) throws QueryException { + if ( child == null ) { + //null child could occur for no from clause in a filter + cacheSelectTokens = false; + } + else { + child.end( q ); + } + } + + public void start(QueryTranslatorImpl q) { + } + + public void end(QueryTranslatorImpl q) throws QueryException { + endChild( q ); + if ( selectTokens != null ) { + child = new SelectParser(); + child.start( q ); + Iterator iter = selectTokens.iterator(); + while ( iter.hasNext() ) { + token( ( String ) iter.next(), q ); + } + child.end( q ); + } + byExpected = false; + parenCount = 0; + cacheSelectTokens = false; + } + +} + + + + + + + diff --git a/src/org/hibernate/hql/classic/FromParser.java b/src/org/hibernate/hql/classic/FromParser.java new file mode 100644 index 0000000000..4e7725ea33 --- /dev/null +++ b/src/org/hibernate/hql/classic/FromParser.java @@ -0,0 +1,231 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; +import org.hibernate.hql.QueryTranslator; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.JoinFragment; + +import java.util.HashMap; +import java.util.Map; + +/** + * Parses the from clause of a hibernate query, looking for tables and + * aliases for the SQL query. 
+ */ + +public class FromParser implements Parser { + + private final PathExpressionParser peParser = new FromPathExpressionParser(); + private String entityName; + private String alias; + private boolean afterIn; + private boolean afterAs; + private boolean afterClass; + private boolean expectingJoin; + private boolean expectingIn; + private boolean expectingAs; + private boolean afterJoinType; + private int joinType; + private boolean afterFetch; + + private static final int NONE = -666; + + private static final Map JOIN_TYPES = new HashMap(); + + static { + JOIN_TYPES.put( "left", new Integer( JoinFragment.LEFT_OUTER_JOIN ) ); + JOIN_TYPES.put( "right", new Integer( JoinFragment.RIGHT_OUTER_JOIN ) ); + JOIN_TYPES.put( "full", new Integer( JoinFragment.FULL_JOIN ) ); + JOIN_TYPES.put( "inner", new Integer( JoinFragment.INNER_JOIN ) ); + } + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + + // start by looking for HQL keywords... + String lcToken = token.toLowerCase(); + if ( lcToken.equals( "," ) ) { + if ( !( expectingJoin | expectingAs ) ) throw new QueryException( "unexpected token: ," ); + expectingJoin = false; + expectingAs = false; + } + else if ( lcToken.equals( "join" ) ) { + if ( !afterJoinType ) { + if ( !( expectingJoin | expectingAs ) ) throw new QueryException( "unexpected token: join" ); + // inner joins can be abbreviated to 'join' + joinType = JoinFragment.INNER_JOIN; + expectingJoin = false; + expectingAs = false; + } + else { + afterJoinType = false; + } + } + else if ( lcToken.equals( "fetch" ) ) { + if ( q.isShallowQuery() ) throw new QueryException( QueryTranslator.ERROR_CANNOT_FETCH_WITH_ITERATE ); + if ( joinType == NONE ) throw new QueryException( "unexpected token: fetch" ); + if ( joinType == JoinFragment.FULL_JOIN || joinType == JoinFragment.RIGHT_OUTER_JOIN ) { + throw new QueryException( "fetch may only be used with inner join or left outer join" ); + } + afterFetch = true; + } + else if ( lcToken.equals( "outer" ) ) { + // 'outer' is optional and is ignored + if ( !afterJoinType || + ( joinType != JoinFragment.LEFT_OUTER_JOIN && joinType != JoinFragment.RIGHT_OUTER_JOIN ) + ) { + throw new QueryException( "unexpected token: outer" ); + } + } + else if ( JOIN_TYPES.containsKey( lcToken ) ) { + if ( !( expectingJoin | expectingAs ) ) throw new QueryException( "unexpected token: " + token ); + joinType = ( ( Integer ) JOIN_TYPES.get( lcToken ) ).intValue(); + afterJoinType = true; + expectingJoin = false; + expectingAs = false; + } + else if ( lcToken.equals( "class" ) ) { + if ( !afterIn ) throw new QueryException( "unexpected token: class" ); + if ( joinType != NONE ) throw new QueryException( "outer or full join must be followed by path expression" ); + afterClass = true; + } + else if ( lcToken.equals( "in" ) ) { + if ( !expectingIn ) throw new QueryException( "unexpected token: in" ); + afterIn = true; + expectingIn = false; + } + else if ( lcToken.equals( "as" ) ) { + if ( !expectingAs ) throw new QueryException( "unexpected token: as" ); + afterAs = true; + expectingAs = false; + } + else { + + if ( afterJoinType ) throw new QueryException( "join expected: " + token ); + if ( expectingJoin ) throw new QueryException( "unexpected token: " + token ); + if ( expectingIn ) throw new QueryException( "in expected: " + token ); + + // now anything that is not a HQL keyword + + if ( afterAs || expectingAs ) { + + // (AS is always optional, for consistency with SQL/OQL) + + // process the "new" HQL style where aliases are 
assigned + // _after_ the class name or path expression ie. using + // the AS construction + + if ( entityName != null ) { + q.setAliasName( token, entityName ); + } + else { + throw new QueryException( "unexpected: as " + token ); + } + afterAs = false; + expectingJoin = true; + expectingAs = false; + entityName = null; + + } + else if ( afterIn ) { + + // process the "old" HQL style where aliases appear _first_ + // ie. using the IN or IN CLASS constructions + + if ( alias == null ) throw new QueryException( "alias not specified for: " + token ); + + if ( joinType != NONE ) throw new QueryException( "outer or full join must be followed by path expression" ); + + if ( afterClass ) { + // treat it as a classname + Queryable p = q.getEntityPersisterUsingImports( token ); + if ( p == null ) throw new QueryException( "persister not found: " + token ); + q.addFromClass( alias, p ); + } + else { + // treat it as a path expression + peParser.setJoinType( JoinFragment.INNER_JOIN ); + peParser.setUseThetaStyleJoin( true ); + ParserHelper.parse( peParser, q.unalias( token ), ParserHelper.PATH_SEPARATORS, q ); + if ( !peParser.isCollectionValued() ) throw new QueryException( "path expression did not resolve to collection: " + token ); + String nm = peParser.addFromCollection( q ); + q.setAliasName( alias, nm ); + } + + alias = null; + afterIn = false; + afterClass = false; + expectingJoin = true; + } + else { + + // handle a path expression or class name that + // appears at the start, in the "new" HQL + // style or an alias that appears at the start + // in the "old" HQL style + + Queryable p = q.getEntityPersisterUsingImports( token ); + if ( p != null ) { + // starts with the name of a mapped class (new style) + if ( joinType != NONE ) throw new QueryException( "outer or full join must be followed by path expression" ); + entityName = q.createNameFor( p.getEntityName() ); + q.addFromClass( entityName, p ); + expectingAs = true; + } + else if ( token.indexOf( '.' ) < 0 ) { + // starts with an alias (old style) + // semi-bad thing about this: can't re-alias another alias..... 
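+				// e.g. (hypothetical) in the old-style "from cat in class eg.Cat", the bare token
+				// 'cat' lands here and is held as the alias until the IN CLASS part is processed.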
+ alias = token; + expectingIn = true; + } + else { + + // starts with a path expression (new style) + + // force HQL style: from Person p inner join p.cars c + //if (joinType==NONE) throw new QueryException("path expression must be preceded by full, left, right or inner join"); + + //allow ODMG OQL style: from Person p, p.cars c + if ( joinType != NONE ) { + peParser.setJoinType( joinType ); + } + else { + peParser.setJoinType( JoinFragment.INNER_JOIN ); + } + peParser.setUseThetaStyleJoin( q.isSubquery() ); + + ParserHelper.parse( peParser, q.unalias( token ), ParserHelper.PATH_SEPARATORS, q ); + entityName = peParser.addFromAssociation( q ); + + joinType = NONE; + peParser.setJoinType( JoinFragment.INNER_JOIN ); + + if ( afterFetch ) { + peParser.fetch( q, entityName ); + afterFetch = false; + } + + expectingAs = true; + + } + } + } + + } + + public void start(QueryTranslatorImpl q) { + entityName = null; + alias = null; + afterIn = false; + afterAs = false; + afterClass = false; + expectingJoin = false; + expectingIn = false; + expectingAs = false; + joinType = NONE; + } + + public void end(QueryTranslatorImpl q) { + } + +} diff --git a/src/org/hibernate/hql/classic/FromPathExpressionParser.java b/src/org/hibernate/hql/classic/FromPathExpressionParser.java new file mode 100644 index 0000000000..1a5a89af91 --- /dev/null +++ b/src/org/hibernate/hql/classic/FromPathExpressionParser.java @@ -0,0 +1,32 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; +import org.hibernate.persister.collection.CollectionPropertyNames; +import org.hibernate.type.Type; + +public class FromPathExpressionParser extends PathExpressionParser { + + public void end(QueryTranslatorImpl q) throws QueryException { + if ( !isCollectionValued() ) { + Type type = getPropertyType(); + if ( type.isEntityType() ) { + // "finish off" the join + token( ".", q ); + token( null, q ); + } + else if ( type.isCollectionType() ) { + // default to element set if no elements() specified + token( ".", q ); + token( CollectionPropertyNames.COLLECTION_ELEMENTS, q ); + } + } + super.end( q ); + } + + protected void setExpectingCollectionIndex() throws QueryException { + throw new QueryException( "illegal syntax near collection-valued path expression in from: " + getCollectionName() ); + } + + +} diff --git a/src/org/hibernate/hql/classic/GroupByParser.java b/src/org/hibernate/hql/classic/GroupByParser.java new file mode 100644 index 0000000000..960424740f --- /dev/null +++ b/src/org/hibernate/hql/classic/GroupByParser.java @@ -0,0 +1,53 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; +import org.hibernate.util.StringHelper; + +/** + * Parses the GROUP BY clause of an aggregate query + */ +public class GroupByParser implements Parser { + + //this is basically a copy/paste of OrderByParser ... might be worth refactoring + + // This uses a PathExpressionParser but notice that compound paths are not valid, + // only bare names and simple paths: + + // SELECT p FROM p IN CLASS eg.Person GROUP BY p.Name, p.Address, p + + // The reason for this is SQL doesn't let you sort by an expression you are + // not returning in the result set. 
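+	// e.g. (hypothetical) in "group by p.name", the alias root 'p' is recognized below and the
+	// path is resolved to its columns via the PathExpressionParser; other tokens pass through as-is.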
+ + private final PathExpressionParser pathExpressionParser; + + { + pathExpressionParser = new PathExpressionParser(); + pathExpressionParser.setUseThetaStyleJoin( true ); //TODO: would be nice to use false, but issues with MS SQL + } + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + + if ( q.isName( StringHelper.root( token ) ) ) { + ParserHelper.parse( pathExpressionParser, q.unalias( token ), ParserHelper.PATH_SEPARATORS, q ); + q.appendGroupByToken( pathExpressionParser.getWhereColumn() ); + pathExpressionParser.addAssociation( q ); + } + else { + q.appendGroupByToken( token ); + } + } + + public void start(QueryTranslatorImpl q) throws QueryException { + } + + public void end(QueryTranslatorImpl q) throws QueryException { + } + + +} + + + + + diff --git a/src/org/hibernate/hql/classic/HavingParser.java b/src/org/hibernate/hql/classic/HavingParser.java new file mode 100644 index 0000000000..d7ff3ba070 --- /dev/null +++ b/src/org/hibernate/hql/classic/HavingParser.java @@ -0,0 +1,15 @@ +//$Id$ +package org.hibernate.hql.classic; + + +/** + * Parses the having clause of a hibernate query and translates it to an + * SQL having clause. + */ +public class HavingParser extends WhereParser { + + void appendToken(QueryTranslatorImpl q, String token) { + q.appendHavingToken( token ); + } + +} diff --git a/src/org/hibernate/hql/classic/OrderByParser.java b/src/org/hibernate/hql/classic/OrderByParser.java new file mode 100644 index 0000000000..4ece1a4469 --- /dev/null +++ b/src/org/hibernate/hql/classic/OrderByParser.java @@ -0,0 +1,50 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; +import org.hibernate.util.StringHelper; + +/** + * Parses the ORDER BY clause of a query + */ + +public class OrderByParser implements Parser { + + // This uses a PathExpressionParser but notice that compound paths are not valid, + // only bare names and simple paths: + + // SELECT p FROM p IN CLASS eg.Person ORDER BY p.Name, p.Address, p + + // The reason for this is SQL doesn't let you sort by an expression you are + // not returning in the result set. + + private final PathExpressionParser pathExpressionParser; + + { + pathExpressionParser = new PathExpressionParser(); + pathExpressionParser.setUseThetaStyleJoin( true ); //TODO: would be nice to use false, but issues with MS SQL + } + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + + if ( q.isName( StringHelper.root( token ) ) ) { + ParserHelper.parse( pathExpressionParser, q.unalias( token ), ParserHelper.PATH_SEPARATORS, q ); + q.appendOrderByToken( pathExpressionParser.getWhereColumn() ); + pathExpressionParser.addAssociation( q ); + } + else if ( token.startsWith( ParserHelper.HQL_VARIABLE_PREFIX ) ) { //named query parameter + q.addNamedParameter( token.substring( 1 ) ); + q.appendOrderByToken( "?" ); + } + else { + q.appendOrderByToken( token ); + } + } + + public void start(QueryTranslatorImpl q) throws QueryException { + } + + public void end(QueryTranslatorImpl q) throws QueryException { + } + +} diff --git a/src/org/hibernate/hql/classic/Parser.java b/src/org/hibernate/hql/classic/Parser.java new file mode 100644 index 0000000000..f4107d376c --- /dev/null +++ b/src/org/hibernate/hql/classic/Parser.java @@ -0,0 +1,26 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; + +/** + * A parser is a state machine that accepts a string of tokens, + * bounded by start() and end() and modifies a QueryTranslator. 
Parsers + * are NOT intended to be threadsafe. They SHOULD be reusable + * for more than one token stream. + */ + +public interface Parser { + public void token(String token, QueryTranslatorImpl q) throws QueryException; + + public void start(QueryTranslatorImpl q) throws QueryException; + + public void end(QueryTranslatorImpl q) throws QueryException; +} + + + + + + + diff --git a/src/org/hibernate/hql/classic/ParserHelper.java b/src/org/hibernate/hql/classic/ParserHelper.java new file mode 100644 index 0000000000..142685d88a --- /dev/null +++ b/src/org/hibernate/hql/classic/ParserHelper.java @@ -0,0 +1,38 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; +import org.hibernate.util.StringHelper; + +import java.util.StringTokenizer; + +public final class ParserHelper { + + public static final String HQL_VARIABLE_PREFIX = ":"; + + public static final String HQL_SEPARATORS = " \n\r\f\t,()=<>&|+-=/*'^![]#~\\"; + //NOTICE: no " or . since they are part of (compound) identifiers + public static final String PATH_SEPARATORS = "."; + + public static boolean isWhitespace(String str) { + return StringHelper.WHITESPACE.indexOf( str ) > -1; + } + + private ParserHelper() { + //cannot instantiate + } + + public static void parse(Parser p, String text, String separators, QueryTranslatorImpl q) throws QueryException { + StringTokenizer tokens = new StringTokenizer( text, separators, true ); + p.start( q ); + while ( tokens.hasMoreElements() ) p.token( tokens.nextToken(), q ); + p.end( q ); + } + +} + + + + + + diff --git a/src/org/hibernate/hql/classic/PathExpressionParser.java b/src/org/hibernate/hql/classic/PathExpressionParser.java new file mode 100644 index 0000000000..bb82d37553 --- /dev/null +++ b/src/org/hibernate/hql/classic/PathExpressionParser.java @@ -0,0 +1,503 @@ +//$Id$ +package org.hibernate.hql.classic; + +import java.util.LinkedList; +import java.util.Map; + +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.engine.JoinSequence; +import org.hibernate.hql.CollectionSubqueryFactory; +import org.hibernate.persister.collection.CollectionPropertyMapping; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.PropertyMapping; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.JoinFragment; +import org.hibernate.type.AssociationType; +import org.hibernate.type.CollectionType; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; + +/** + * Parses an expression of the form foo.bar.baz and builds up an expression + * involving two fewer table joins than there are path components. + */ +public class PathExpressionParser implements Parser { + + //TODO: this class does too many things! we need a different + //kind of path expression parser for each of the different + //ways in which path expressions can occur + + //We should actually rework this class to not implement Parser + //and just process path expressions in the most convenient way. + + //The class is now way too complex!
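The start()/token()/end() contract above is easiest to see driven end-to-end. A minimal sketch of a toy Parser, assuming only the Parser interface and ParserHelper from this patch (EchoParser itself is hypothetical, not part of the patch; q stands for some QueryTranslatorImpl):

    // Illustrative only: collects every non-whitespace token fed to it
    // between start() and end().
    public class EchoParser implements Parser {
        private final java.util.List tokens = new java.util.ArrayList();
        public void start(QueryTranslatorImpl q) { tokens.clear(); }
        public void token(String token, QueryTranslatorImpl q) {
            if ( !ParserHelper.isWhitespace( token ) ) tokens.add( token );
        }
        public void end(QueryTranslatorImpl q) { /* nothing to finish */ }
        public java.util.List getTokens() { return tokens; }
    }

Driving it with ParserHelper.parse( new EchoParser(), "foo.bar.baz", ParserHelper.PATH_SEPARATORS, q ) feeds the stream foo, ".", bar, ".", baz, because the StringTokenizer returns the separators as tokens too; PathExpressionParser below consumes exactly this kind of stream, one dot and one identifier at a time.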
+ + private int dotcount; + private String currentName; + private String currentProperty; + private String oneToOneOwnerName; + private AssociationType ownerAssociationType; + private String[] columns; + private String collectionName; + private String collectionOwnerName; + private String collectionRole; + private final StringBuffer componentPath = new StringBuffer(); + private Type type; + private final StringBuffer path = new StringBuffer(); + private boolean ignoreInitialJoin; + private boolean continuation; + private int joinType = JoinFragment.INNER_JOIN; //default mode + private boolean useThetaStyleJoin = true; + private PropertyMapping currentPropertyMapping; + private JoinSequence joinSequence; + + private boolean expectingCollectionIndex; + private LinkedList collectionElements = new LinkedList(); + + void setJoinType(int joinType) { + this.joinType = joinType; + } + + void setUseThetaStyleJoin(boolean useThetaStyleJoin) { + this.useThetaStyleJoin = useThetaStyleJoin; + } + + private void addJoin(String name, AssociationType joinableType) throws QueryException { + try { + joinSequence.addJoin( joinableType, name, joinType, currentColumns() ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + } + + private void addJoin(String name, AssociationType joinableType, String[] foreignKeyColumns) throws QueryException { + try { + joinSequence.addJoin( joinableType, name, joinType, foreignKeyColumns ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + } + + String continueFromManyToMany(String entityName, String[] joinColumns, QueryTranslatorImpl q) throws QueryException { + start( q ); + continuation = true; + currentName = q.createNameFor( entityName ); + q.addType( currentName, entityName ); + Queryable classPersister = q.getEntityPersister( entityName ); + //QueryJoinFragment join = q.createJoinFragment(useThetaStyleJoin); + addJoin( currentName, TypeFactory.manyToOne( entityName ), joinColumns ); + currentPropertyMapping = classPersister; + return currentName; + } + + public void ignoreInitialJoin() { + ignoreInitialJoin = true; + } + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + + if ( token != null ) path.append( token ); + + String alias = q.getPathAlias( path.toString() ); + if ( alias != null ) { + reset( q ); //reset the dotcount (but not the path) + currentName = alias; //after reset! + currentPropertyMapping = q.getPropertyMapping( currentName ); + if ( !ignoreInitialJoin ) { + JoinSequence ojf = q.getPathJoin( path.toString() ); + try { + joinSequence.addCondition( ojf.toJoinFragment( q.getEnabledFilters(), true ).toWhereFragmentString() ); //after reset! 
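// (i.e. the where-fragment of the join sequence already built for this
// aliased path is re-attached as a condition, so the existing theta-style
// joins keep constraining the query after the parser state was reset above)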
+ } + catch ( MappingException me ) { + throw new QueryException( me ); + } + // we don't need to worry about any condition in the ON clause + // here (toFromFragmentString), since anything in the ON condition + // is already applied to the whole query + } + } + else if ( ".".equals( token ) ) { + dotcount++; + } + else { + if ( dotcount == 0 ) { + if ( !continuation ) { + if ( !q.isName( token ) ) throw new QueryException( "undefined alias: " + token ); + currentName = token; + currentPropertyMapping = q.getPropertyMapping( currentName ); + } + } + else if ( dotcount == 1 ) { + if ( currentName != null ) { + currentProperty = token; + } + else if ( collectionName != null ) { + //processCollectionProperty(token, q.getCollectionPersister(collectionRole), collectionName); + continuation = false; + } + else { + throw new QueryException( "unexpected" ); + } + } + else { // dotcount>=2 + + // Do the corresponding RHS + Type propertyType = getPropertyType(); + + if ( propertyType == null ) { + throw new QueryException( "unresolved property: " + path ); + } + + if ( propertyType.isComponentType() ) { + dereferenceComponent( token ); + } + else if ( propertyType.isEntityType() ) { + if ( !isCollectionValued() ) dereferenceEntity( token, ( EntityType ) propertyType, q ); + } + else if ( propertyType.isCollectionType() ) { + dereferenceCollection( token, ( ( CollectionType ) propertyType ).getRole(), q ); + + } + else { + if ( token != null ) throw new QueryException( "dereferenced: " + path ); + } + + } + } + } + + private void dereferenceEntity(String propertyName, EntityType propertyType, QueryTranslatorImpl q) + throws QueryException { + //NOTE: we avoid joining to the next table if the named property is just the foreign key value + + //if its "id" + boolean isIdShortcut = EntityPersister.ENTITY_ID.equals( propertyName ) && + propertyType.isReferenceToPrimaryKey(); + + //or its the id property name + final String idPropertyName; + try { + idPropertyName = propertyType.getIdentifierOrUniqueKeyPropertyName( q.getFactory() ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + boolean isNamedIdPropertyShortcut = idPropertyName != null + && idPropertyName.equals( propertyName ) + && propertyType.isReferenceToPrimaryKey(); + + if ( isIdShortcut || isNamedIdPropertyShortcut ) { + // special shortcut for id properties, skip the join! + // this must only occur at the _end_ of a path expression + if ( componentPath.length() > 0 ) componentPath.append( '.' ); + componentPath.append( propertyName ); + } + else { + String entityClass = propertyType.getAssociatedEntityName(); + String name = q.createNameFor( entityClass ); + q.addType( name, entityClass ); + addJoin( name, propertyType ); + if ( propertyType.isOneToOne() ) oneToOneOwnerName = currentName; + ownerAssociationType = propertyType; + currentName = name; + currentProperty = propertyName; + q.addPathAliasAndJoin( path.substring( 0, path.toString().lastIndexOf( '.' ) ), name, joinSequence.copy() ); + componentPath.setLength( 0 ); + currentPropertyMapping = q.getEntityPersister( entityClass ); + } + } + + private void dereferenceComponent(String propertyName) { + if ( propertyName != null ) { + if ( componentPath.length() > 0 ) componentPath.append( '.' 
); + componentPath.append( propertyName ); + } + } + + private void dereferenceCollection(String propertyName, String role, QueryTranslatorImpl q) throws QueryException { + collectionRole = role; + QueryableCollection collPersister = q.getCollectionPersister( role ); + String name = q.createNameForCollection( role ); + addJoin( name, collPersister.getCollectionType() ); + //if ( collPersister.hasWhere() ) join.addCondition( collPersister.getSQLWhereString(name) ); + collectionName = name; + collectionOwnerName = currentName; + currentName = name; + currentProperty = propertyName; + componentPath.setLength( 0 ); + currentPropertyMapping = new CollectionPropertyMapping( collPersister ); + } + + private String getPropertyPath() { + if ( currentProperty == null ) { + return EntityPersister.ENTITY_ID; + } + else { + if ( componentPath.length() > 0 ) { + return new StringBuffer() + .append( currentProperty ) + .append( '.' ) + .append( componentPath.toString() ) + .toString(); + } + else { + return currentProperty; + } + } + } + + private PropertyMapping getPropertyMapping() { + return currentPropertyMapping; + } + + private void setType() throws QueryException { + if ( currentProperty == null ) { + type = getPropertyMapping().getType(); + } + else { + type = getPropertyType(); + } + } + + protected Type getPropertyType() throws QueryException { + String propertyPath = getPropertyPath(); + Type propertyType = getPropertyMapping().toType( propertyPath ); + if ( propertyType == null ) { + throw new QueryException( "could not resolve property type: " + propertyPath ); + } + return propertyType; + } + + protected String[] currentColumns() throws QueryException { + String propertyPath = getPropertyPath(); + String[] propertyColumns = getPropertyMapping().toColumns( currentName, propertyPath ); + if ( propertyColumns == null ) { + throw new QueryException( "could not resolve property columns: " + propertyPath ); + } + return propertyColumns; + } + + private void reset(QueryTranslatorImpl q) { + //join = q.createJoinFragment(useThetaStyleJoin); + dotcount = 0; + currentName = null; + currentProperty = null; + collectionName = null; + collectionRole = null; + componentPath.setLength( 0 ); + type = null; + collectionName = null; + columns = null; + expectingCollectionIndex = false; + continuation = false; + currentPropertyMapping = null; + } + + public void start(QueryTranslatorImpl q) { + if ( !continuation ) { + reset( q ); + path.setLength( 0 ); + joinSequence = new JoinSequence( q.getFactory() ).setUseThetaStyle( useThetaStyleJoin ); + } + } + + public void end(QueryTranslatorImpl q) throws QueryException { + ignoreInitialJoin = false; + + Type propertyType = getPropertyType(); + if ( propertyType != null && propertyType.isCollectionType() ) { + collectionRole = ( ( CollectionType ) propertyType ).getRole(); + collectionName = q.createNameForCollection( collectionRole ); + prepareForIndex( q ); + } + else { + columns = currentColumns(); + setType(); + } + + //important!! 
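// clearing continuation guarantees that the next start() performs a full
// reset of the path and join sequence instead of extending this expression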
+ continuation = false; + + } + + private void prepareForIndex(QueryTranslatorImpl q) throws QueryException { + + QueryableCollection collPersister = q.getCollectionPersister( collectionRole ); + + if ( !collPersister.hasIndex() ) throw new QueryException( "unindexed collection before []: " + path ); + String[] indexCols = collPersister.getIndexColumnNames(); + if ( indexCols.length != 1 ) throw new QueryException( "composite-index appears in []: " + path ); + //String[] keyCols = collPersister.getKeyColumnNames(); + + JoinSequence fromJoins = new JoinSequence( q.getFactory() ) + .setUseThetaStyle( useThetaStyleJoin ) + .setRoot( collPersister, collectionName ) + .setNext( joinSequence.copy() ); + + if ( !continuation ) addJoin( collectionName, collPersister.getCollectionType() ); + + joinSequence.addCondition( collectionName + '.' + indexCols[0] + " = " ); //TODO: get SQL rendering out of here + + CollectionElement elem = new CollectionElement(); + elem.elementColumns = collPersister.getElementColumnNames(collectionName); + elem.elementType = collPersister.getElementType(); + elem.isOneToMany = collPersister.isOneToMany(); + elem.alias = collectionName; + elem.joinSequence = joinSequence; + collectionElements.addLast( elem ); + setExpectingCollectionIndex(); + + q.addCollection( collectionName, collectionRole ); + q.addFromJoinOnly( collectionName, fromJoins ); + } + + static final class CollectionElement { + Type elementType; + boolean isOneToMany; + String alias; + String[] elementColumns; + JoinSequence joinSequence; + StringBuffer indexValue = new StringBuffer(); + } + + public CollectionElement lastCollectionElement() { + return ( CollectionElement ) collectionElements.removeLast(); + } + + public void setLastCollectionElementIndexValue(String value) { + ( ( CollectionElement ) collectionElements.getLast() ).indexValue.append( value ); + } + + public boolean isExpectingCollectionIndex() { + return expectingCollectionIndex; + } + + protected void setExpectingCollectionIndex() throws QueryException { + expectingCollectionIndex = true; + } + + public JoinSequence getWhereJoin() { + return joinSequence; + } + + public String getWhereColumn() throws QueryException { + if ( columns.length != 1 ) { + throw new QueryException( "path expression ends in a composite value: " + path ); + } + return columns[0]; + } + + public String[] getWhereColumns() { + return columns; + } + + public Type getWhereColumnType() { + return type; + } + + public String getName() { + return currentName == null ? collectionName : currentName; + } + + + public String getCollectionSubquery(Map enabledFilters) throws QueryException { + return CollectionSubqueryFactory.createCollectionSubquery( joinSequence, enabledFilters, currentColumns() ); + } + + public boolean isCollectionValued() throws QueryException { + //TODO: is there a better way? 
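// read: a collection join was established (collectionName != null) and the
// trailing property path does not resolve to yet another collection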
+ return collectionName != null && !getPropertyType().isCollectionType(); + } + + public void addAssociation(QueryTranslatorImpl q) throws QueryException { + q.addJoin( getName(), joinSequence ); + } + + public String addFromAssociation(QueryTranslatorImpl q) throws QueryException { + if ( isCollectionValued() ) { + return addFromCollection( q ); + } + else { + q.addFrom( currentName, joinSequence ); + return currentName; + } + } + + public String addFromCollection(QueryTranslatorImpl q) throws QueryException { + Type collectionElementType = getPropertyType(); + + if ( collectionElementType == null ) { + throw new QueryException( "must specify 'elements' for collection valued property in from clause: " + path ); + } + + if ( collectionElementType.isEntityType() ) { + // an association + QueryableCollection collectionPersister = q.getCollectionPersister( collectionRole ); + Queryable entityPersister = ( Queryable ) collectionPersister.getElementPersister(); + String clazz = entityPersister.getEntityName(); + + final String elementName; + if ( collectionPersister.isOneToMany() ) { + elementName = collectionName; + //allow index() function: + q.decoratePropertyMapping( elementName, collectionPersister ); + } + else { //many-to-many + q.addCollection( collectionName, collectionRole ); + elementName = q.createNameFor( clazz ); + addJoin( elementName, ( AssociationType ) collectionElementType ); + } + q.addFrom( elementName, clazz, joinSequence ); + currentPropertyMapping = new CollectionPropertyMapping( collectionPersister ); + return elementName; + } + else { + // collections of values + q.addFromCollection( collectionName, collectionRole, joinSequence ); + return collectionName; + } + + } + + String getCollectionName() { + return collectionName; + } + + String getCollectionRole() { + return collectionRole; + } + + String getCollectionOwnerName() { + return collectionOwnerName; + } + + String getOneToOneOwnerName() { + return oneToOneOwnerName; + } + + AssociationType getOwnerAssociationType() { + return ownerAssociationType; + } + + String getCurrentProperty() { + return currentProperty; + } + + String getCurrentName() { + return currentName; + } + + public void fetch(QueryTranslatorImpl q, String entityName) throws QueryException { + if ( isCollectionValued() ) { + q.setCollectionToFetch( getCollectionRole(), getCollectionName(), getCollectionOwnerName(), entityName ); + } + else { + q.addEntityToFetch( entityName, getOneToOneOwnerName(), getOwnerAssociationType() ); + } + } +} diff --git a/src/org/hibernate/hql/classic/PreprocessingParser.java b/src/org/hibernate/hql/classic/PreprocessingParser.java new file mode 100644 index 0000000000..c890664c3d --- /dev/null +++ b/src/org/hibernate/hql/classic/PreprocessingParser.java @@ -0,0 +1,133 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; +import org.hibernate.hql.CollectionProperties; +import org.hibernate.util.StringHelper; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * + */ +public class PreprocessingParser implements Parser { + + private static final Set HQL_OPERATORS; + + static { + HQL_OPERATORS = new HashSet(); + HQL_OPERATORS.add( "<=" ); + HQL_OPERATORS.add( ">=" ); + HQL_OPERATORS.add( "=>" ); + HQL_OPERATORS.add( "=<" ); + HQL_OPERATORS.add( "!=" ); + HQL_OPERATORS.add( "<>" ); + HQL_OPERATORS.add( "!#" ); + HQL_OPERATORS.add( "!~" ); + HQL_OPERATORS.add( "!<" ); + HQL_OPERATORS.add( "!>" ); + HQL_OPERATORS.add( "is not" ); + HQL_OPERATORS.add( "not like" ); 
+ HQL_OPERATORS.add( "not in" ); + HQL_OPERATORS.add( "not between" ); + HQL_OPERATORS.add( "not exists" ); + } + + private Map replacements; + private boolean quoted; + private StringBuffer quotedString; + private ClauseParser parser = new ClauseParser(); + private String lastToken; + private String currentCollectionProp; + + public PreprocessingParser(Map replacements) { + this.replacements = replacements; + } + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + + //handle quoted strings + if ( quoted ) { + quotedString.append( token ); + } + if ( "'".equals( token ) ) { + if ( quoted ) { + token = quotedString.toString(); + } + else { + quotedString = new StringBuffer( 20 ).append( token ); + } + quoted = !quoted; + } + if ( quoted ) return; + + //ignore whitespace + if ( ParserHelper.isWhitespace( token ) ) return; + + //do replacements + String substoken = ( String ) replacements.get( token ); + token = ( substoken == null ) ? token : substoken; + + //handle HQL2 collection syntax + if ( currentCollectionProp != null ) { + if ( "(".equals( token ) ) { + return; + } + else if ( ")".equals( token ) ) { + currentCollectionProp = null; + return; + } + else { + token = StringHelper.qualify( token, currentCollectionProp ); + } + } + else { + String prop = CollectionProperties.getNormalizedPropertyName( token.toLowerCase() ); + if ( prop != null ) { + currentCollectionProp = prop; + return; + } + } + + + //handle <=, >=, !=, is not, not between, not in + if ( lastToken == null ) { + lastToken = token; + } + else { + String doubleToken = ( token.length() > 1 ) ? + lastToken + ' ' + token : + lastToken + token; + if ( HQL_OPERATORS.contains( doubleToken.toLowerCase() ) ) { + parser.token( doubleToken, q ); + lastToken = null; + } + else { + parser.token( lastToken, q ); + lastToken = token; + } + } + + } + + public void start(QueryTranslatorImpl q) throws QueryException { + quoted = false; + parser.start( q ); + } + + public void end(QueryTranslatorImpl q) throws QueryException { + if ( lastToken != null ) parser.token( lastToken, q ); + parser.end( q ); + lastToken = null; + currentCollectionProp = null; + } + +} + + + + + + diff --git a/src/org/hibernate/hql/classic/QueryTranslatorImpl.java b/src/org/hibernate/hql/classic/QueryTranslatorImpl.java new file mode 100644 index 0000000000..16a7a1eafa --- /dev/null +++ b/src/org/hibernate/hql/classic/QueryTranslatorImpl.java @@ -0,0 +1,1161 @@ +//$Id$ +package org.hibernate.hql.classic; + +import java.io.Serializable; +import java.lang.reflect.Constructor; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.collections.SequencedHashMap; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.ScrollableResults; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.JoinSequence; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.event.EventSource; +import org.hibernate.exception.JDBCExceptionHelper; +import 
org.hibernate.hql.FilterTranslator; +import org.hibernate.hql.HolderInstantiator; +import org.hibernate.hql.NameGenerator; +import org.hibernate.hql.ParameterTranslations; +import org.hibernate.impl.IteratorImpl; +import org.hibernate.loader.BasicLoader; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.collection.QueryableCollection; +import org.hibernate.persister.entity.Loadable; +import org.hibernate.persister.entity.PropertyMapping; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.JoinFragment; +import org.hibernate.sql.QuerySelect; +import org.hibernate.transform.ResultTransformer; +import org.hibernate.type.AssociationType; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +/** + * An instance of QueryTranslator translates a Hibernate + * query string to SQL. + */ +public class QueryTranslatorImpl extends BasicLoader implements FilterTranslator { + + private static final String[] NO_RETURN_ALIASES = new String[] {}; + + private final String queryIdentifier; + private final String queryString; + + private final Map typeMap = new SequencedHashMap(); + private final Map collections = new SequencedHashMap(); + private List returnedTypes = new ArrayList(); + private final List fromTypes = new ArrayList(); + private final List scalarTypes = new ArrayList(); + private final Map namedParameters = new HashMap(); + private final Map aliasNames = new HashMap(); + private final Map oneToOneOwnerNames = new HashMap(); + private final Map uniqueKeyOwnerReferences = new HashMap(); + private final Map decoratedPropertyMappings = new HashMap(); + + private final List scalarSelectTokens = new ArrayList(); + private final List whereTokens = new ArrayList(); + private final List havingTokens = new ArrayList(); + private final Map joins = new SequencedHashMap(); + private final List orderByTokens = new ArrayList(); + private final List groupByTokens = new ArrayList(); + private final Set querySpaces = new HashSet(); + private final Set entitiesToFetch = new HashSet(); + + private final Map pathAliases = new HashMap(); + private final Map pathJoins = new HashMap(); + + private Queryable[] persisters; + private int[] owners; + private EntityType[] ownerAssociationTypes; + private String[] names; + private boolean[] includeInSelect; + private int selectLength; + private Type[] returnTypes; + private Type[] actualReturnTypes; + private String[][] scalarColumnNames; + private Map tokenReplacements; + private int nameCount = 0; + private int parameterCount = 0; + private boolean distinct = false; + private boolean compiled; + private String sqlString; + private Class holderClass; + private Constructor holderConstructor; + private boolean hasScalars; + private boolean shallowQuery; + private QueryTranslatorImpl superQuery; + + private QueryableCollection collectionPersister; + private int collectionOwnerColumn = -1; + private String collectionOwnerName; + private String fetchName; + + private String[] suffixes; + + private Map enabledFilters; + + private static final Log log = LogFactory.getLog( QueryTranslatorImpl.class ); + + /** + * Construct a query translator + * + * @param queryIdentifier A unique identifier for the query of which this + * translation is part; typically this is the original, user-supplied query string. 
+ * @param queryString The "preprocessed" query string; at the very least + * already processed by {@link org.hibernate.hql.QuerySplitter}. + * @param enabledFilters Any enabled filters. + * @param factory The session factory. + */ + public QueryTranslatorImpl( + String queryIdentifier, + String queryString, + Map enabledFilters, + SessionFactoryImplementor factory) { + super( factory ); + this.queryIdentifier = queryIdentifier; + this.queryString = queryString; + this.enabledFilters = enabledFilters; + } + + /** + * Construct a query translator; this form used internally. + * + * @param queryString The query string to process. + * @param enabledFilters Any enabled filters. + * @param factory The session factory. + */ + public QueryTranslatorImpl( + String queryString, + Map enabledFilters, + SessionFactoryImplementor factory) { + this( queryString, queryString, enabledFilters, factory ); + } + + /** + * Compile a subquery. + * + * @param superquery The containing query of the query to be compiled. + * + * @throws org.hibernate.MappingException Indicates problems resolving + * things referenced in the query. + * @throws org.hibernate.QueryException Generally some form of syntatic + * failure. + */ + void compile(QueryTranslatorImpl superquery) throws QueryException, MappingException { + this.tokenReplacements = superquery.tokenReplacements; + this.superQuery = superquery; + this.shallowQuery = true; + this.enabledFilters = superquery.getEnabledFilters(); + compile(); + } + + + /** + * Compile a "normal" query. This method may be called multiple + * times. Subsequent invocations are no-ops. + */ + public synchronized void compile( + Map replacements, + boolean scalar) throws QueryException, MappingException { + if ( !compiled ) { + this.tokenReplacements = replacements; + this.shallowQuery = scalar; + compile(); + } + } + + /** + * Compile a filter. This method may be called multiple + * times. Subsequent invocations are no-ops. + */ + public synchronized void compile( + String collectionRole, + Map replacements, + boolean scalar) throws QueryException, MappingException { + + if ( !isCompiled() ) { + addFromAssociation( "this", collectionRole ); + compile( replacements, scalar ); + } + } + + /** + * Compile the query (generate the SQL). + * + * @throws org.hibernate.MappingException Indicates problems resolving + * things referenced in the query. + * @throws org.hibernate.QueryException Generally some form of syntatic + * failure. + */ + private void compile() throws QueryException, MappingException { + + log.trace( "compiling query" ); + try { + ParserHelper.parse( new PreprocessingParser( tokenReplacements ), + queryString, + ParserHelper.HQL_SEPARATORS, + this ); + renderSQL(); + } + catch ( QueryException qe ) { + qe.setQueryString( queryString ); + throw qe; + } + catch ( MappingException me ) { + throw me; + } + catch ( Exception e ) { + log.debug( "unexpected query compilation problem", e ); + e.printStackTrace(); + QueryException qe = new QueryException( "Incorrect query syntax", e ); + qe.setQueryString( queryString ); + throw qe; + } + + postInstantiate(); + + compiled = true; + + } + + public String getSQLString() { + return sqlString; + } + + public List collectSqlStrings() { + return ArrayHelper.toList( new String[] { sqlString } ); + } + + public String getQueryString() { + return queryString; + } + + /** + * Persisters for the return values of a find() style query. + * + * @return an array of EntityPersisters. 
+ */ + protected Loadable[] getEntityPersisters() { + return persisters; + } + + /** + * Types of the return values of an iterate() style query. + * + * @return an array of Types. + */ + public Type[] getReturnTypes() { + return actualReturnTypes; + } + + public String[] getReturnAliases() { + // return aliases not supported in classic translator! + return NO_RETURN_ALIASES; + } + + public String[][] getColumnNames() { + return scalarColumnNames; + } + + private static void logQuery(String hql, String sql) { + if ( log.isDebugEnabled() ) { + log.debug( "HQL: " + hql ); + log.debug( "SQL: " + sql ); + } + } + + void setAliasName(String alias, String name) { + aliasNames.put( alias, name ); + } + + public String getAliasName(String alias) { + String name = ( String ) aliasNames.get( alias ); + if ( name == null ) { + if ( superQuery != null ) { + name = superQuery.getAliasName( alias ); + } + else { + name = alias; + } + } + return name; + } + + String unalias(String path) { + String alias = StringHelper.root( path ); + String name = getAliasName( alias ); + if ( name != null ) { + return name + path.substring( alias.length() ); + } + else { + return path; + } + } + + void addEntityToFetch(String name, String oneToOneOwnerName, AssociationType ownerAssociationType) { + addEntityToFetch( name ); + if ( oneToOneOwnerName != null ) oneToOneOwnerNames.put( name, oneToOneOwnerName ); + if ( ownerAssociationType != null ) uniqueKeyOwnerReferences.put( name, ownerAssociationType ); + } + + private void addEntityToFetch(String name) { + entitiesToFetch.add( name ); + } + + private int nextCount() { + return ( superQuery == null ) ? nameCount++ : superQuery.nameCount++; + } + + String createNameFor(String type) { + return StringHelper.generateAlias( type, nextCount() ); + } + + String createNameForCollection(String role) { + return StringHelper.generateAlias( role, nextCount() ); + } + + private String getType(String name) { + String type = ( String ) typeMap.get( name ); + if ( type == null && superQuery != null ) { + type = superQuery.getType( name ); + } + return type; + } + + private String getRole(String name) { + String role = ( String ) collections.get( name ); + if ( role == null && superQuery != null ) { + role = superQuery.getRole( name ); + } + return role; + } + + boolean isName(String name) { + return aliasNames.containsKey( name ) || + typeMap.containsKey( name ) || + collections.containsKey( name ) || ( + superQuery != null && superQuery.isName( name ) + ); + } + + PropertyMapping getPropertyMapping(String name) throws QueryException { + PropertyMapping decorator = getDecoratedPropertyMapping( name ); + if ( decorator != null ) return decorator; + + String type = getType( name ); + if ( type == null ) { + String role = getRole( name ); + if ( role == null ) { + throw new QueryException( "alias not found: " + name ); + } + return getCollectionPersister( role ); //.getElementPropertyMapping(); + } + else { + Queryable persister = getEntityPersister( type ); + if ( persister == null ) throw new QueryException( "persistent class not found: " + type ); + return persister; + } + } + + private PropertyMapping getDecoratedPropertyMapping(String name) { + return ( PropertyMapping ) decoratedPropertyMappings.get( name ); + } + + void decoratePropertyMapping(String name, PropertyMapping mapping) { + decoratedPropertyMappings.put( name, mapping ); + } + + private Queryable getEntityPersisterForName(String name) throws QueryException { + String type = getType( name ); + Queryable persister = 
getEntityPersister( type ); + if ( persister == null ) throw new QueryException( "persistent class not found: " + type ); + return persister; + } + + Queryable getEntityPersisterUsingImports(String className) { + final String importedClassName = getFactory().getImportedClassName( className ); + if ( importedClassName == null ) { + return null; + } + try { + return ( Queryable ) getFactory().getEntityPersister( importedClassName ); + } + catch ( MappingException me ) { + return null; + } + } + + Queryable getEntityPersister(String entityName) throws QueryException { + try { + return ( Queryable ) getFactory().getEntityPersister( entityName ); + } + catch ( Exception e ) { + throw new QueryException( "persistent class not found: " + entityName ); + } + } + + QueryableCollection getCollectionPersister(String role) throws QueryException { + try { + return ( QueryableCollection ) getFactory().getCollectionPersister( role ); + } + catch ( ClassCastException cce ) { + throw new QueryException( "collection role is not queryable: " + role ); + } + catch ( Exception e ) { + throw new QueryException( "collection role not found: " + role ); + } + } + + void addType(String name, String type) { + typeMap.put( name, type ); + } + + void addCollection(String name, String role) { + collections.put( name, role ); + } + + void addFrom(String name, String type, JoinSequence joinSequence) + throws QueryException { + addType( name, type ); + addFrom( name, joinSequence ); + } + + void addFromCollection(String name, String collectionRole, JoinSequence joinSequence) + throws QueryException { + //register collection role + addCollection( name, collectionRole ); + addJoin( name, joinSequence ); + } + + void addFrom(String name, JoinSequence joinSequence) + throws QueryException { + fromTypes.add( name ); + addJoin( name, joinSequence ); + } + + void addFromClass(String name, Queryable classPersister) + throws QueryException { + JoinSequence joinSequence = new JoinSequence( getFactory() ) + .setRoot( classPersister, name ); + //crossJoins.add(name); + addFrom( name, classPersister.getEntityName(), joinSequence ); + } + + void addSelectClass(String name) { + returnedTypes.add( name ); + } + + void addSelectScalar(Type type) { + scalarTypes.add( type ); + } + + void appendWhereToken(String token) { + whereTokens.add( token ); + } + + void appendHavingToken(String token) { + havingTokens.add( token ); + } + + void appendOrderByToken(String token) { + orderByTokens.add( token ); + } + + void appendGroupByToken(String token) { + groupByTokens.add( token ); + } + + void appendScalarSelectToken(String token) { + scalarSelectTokens.add( token ); + } + + void appendScalarSelectTokens(String[] tokens) { + scalarSelectTokens.add( tokens ); + } + + void addFromJoinOnly(String name, JoinSequence joinSequence) throws QueryException { + addJoin( name, joinSequence.getFromPart() ); + } + + void addJoin(String name, JoinSequence joinSequence) throws QueryException { + if ( !joins.containsKey( name ) ) joins.put( name, joinSequence ); + } + + void addNamedParameter(String name) { + if ( superQuery != null ) superQuery.addNamedParameter( name ); + Integer loc = new Integer( parameterCount++ ); + Object o = namedParameters.get( name ); + if ( o == null ) { + namedParameters.put( name, loc ); + } + else if ( o instanceof Integer ) { + ArrayList list = new ArrayList( 4 ); + list.add( o ); + list.add( loc ); + namedParameters.put( name, list ); + } + else { + ( ( ArrayList ) o ).add( loc ); + } + } + + public int[] 
getNamedParameterLocs(String name) throws QueryException { + Object o = namedParameters.get( name ); + if ( o == null ) { + QueryException qe = new QueryException( ERROR_NAMED_PARAMETER_DOES_NOT_APPEAR + name ); + qe.setQueryString( queryString ); + throw qe; + } + if ( o instanceof Integer ) { + return new int[]{ ( ( Integer ) o ).intValue() }; + } + else { + return ArrayHelper.toIntArray( ( ArrayList ) o ); + } + } + + private void renderSQL() throws QueryException, MappingException { + + final int rtsize; + if ( returnedTypes.size() == 0 && scalarTypes.size() == 0 ) { + //ie no select clause in HQL + returnedTypes = fromTypes; + rtsize = returnedTypes.size(); + } + else { + rtsize = returnedTypes.size(); + Iterator iter = entitiesToFetch.iterator(); + while ( iter.hasNext() ) { + returnedTypes.add( iter.next() ); + } + } + int size = returnedTypes.size(); + persisters = new Queryable[size]; + names = new String[size]; + owners = new int[size]; + ownerAssociationTypes = new EntityType[size]; + suffixes = new String[size]; + includeInSelect = new boolean[size]; + for ( int i = 0; i < size; i++ ) { + String name = ( String ) returnedTypes.get( i ); + //if ( !isName(name) ) throw new QueryException("unknown type: " + name); + persisters[i] = getEntityPersisterForName( name ); + // TODO: cannot use generateSuffixes() - it handles the initial suffix differently. + suffixes[i] = ( size == 1 ) ? "" : Integer.toString( i ) + '_'; + names[i] = name; + includeInSelect[i] = !entitiesToFetch.contains( name ); + if ( includeInSelect[i] ) selectLength++; + if ( name.equals( collectionOwnerName ) ) collectionOwnerColumn = i; + String oneToOneOwner = ( String ) oneToOneOwnerNames.get( name ); + owners[i] = ( oneToOneOwner == null ) ? -1 : returnedTypes.indexOf( oneToOneOwner ); + ownerAssociationTypes[i] = (EntityType) uniqueKeyOwnerReferences.get( name ); + } + + if ( ArrayHelper.isAllNegative( owners ) ) owners = null; + + String scalarSelect = renderScalarSelect(); //Must be done here because of side-effect! yuck... + + int scalarSize = scalarTypes.size(); + hasScalars = scalarTypes.size() != rtsize; + + returnTypes = new Type[scalarSize]; + for ( int i = 0; i < scalarSize; i++ ) { + returnTypes[i] = ( Type ) scalarTypes.get( i ); + } + + QuerySelect sql = new QuerySelect( getFactory().getDialect() ); + sql.setDistinct( distinct ); + + if ( !shallowQuery ) { + renderIdentifierSelect( sql ); + renderPropertiesSelect( sql ); + } + + if ( collectionPersister != null ) { + sql.addSelectFragmentString( collectionPersister.selectFragment( fetchName, "__" ) ); + } + + if ( hasScalars || shallowQuery ) sql.addSelectFragmentString( scalarSelect ); + + //TODO: for some dialects it would be appropriate to add the renderOrderByPropertiesSelect() to other select strings + mergeJoins( sql.getJoinFragment() ); + + sql.setWhereTokens( whereTokens.iterator() ); + + sql.setGroupByTokens( groupByTokens.iterator() ); + sql.setHavingTokens( havingTokens.iterator() ); + sql.setOrderByTokens( orderByTokens.iterator() ); + + if ( collectionPersister != null && collectionPersister.hasOrdering() ) { + sql.addOrderBy( collectionPersister.getSQLOrderByString( fetchName ) ); + } + + scalarColumnNames = NameGenerator.generateColumnNames( returnTypes, getFactory() ); + + // initialize the Set of queried identifier spaces (ie. 
tables) + Iterator iter = collections.values().iterator(); + while ( iter.hasNext() ) { + CollectionPersister p = getCollectionPersister( ( String ) iter.next() ); + addQuerySpaces( p.getCollectionSpaces() ); + } + iter = typeMap.keySet().iterator(); + while ( iter.hasNext() ) { + Queryable p = getEntityPersisterForName( ( String ) iter.next() ); + addQuerySpaces( p.getQuerySpaces() ); + } + + sqlString = sql.toQueryString(); + + if ( holderClass != null ) holderConstructor = ReflectHelper.getConstructor( holderClass, returnTypes ); + + if ( hasScalars ) { + actualReturnTypes = returnTypes; + } + else { + actualReturnTypes = new Type[selectLength]; + int j = 0; + for ( int i = 0; i < persisters.length; i++ ) { + if ( includeInSelect[i] ) { + actualReturnTypes[j++] = TypeFactory.manyToOne( persisters[i].getEntityName(), shallowQuery ); + } + } + } + + } + + private void renderIdentifierSelect(QuerySelect sql) { + int size = returnedTypes.size(); + + for ( int k = 0; k < size; k++ ) { + String name = ( String ) returnedTypes.get( k ); + String suffix = size == 1 ? "" : Integer.toString( k ) + '_'; + sql.addSelectFragmentString( persisters[k].identifierSelectFragment( name, suffix ) ); + } + + } + + /*private String renderOrderByPropertiesSelect() { + StringBuffer buf = new StringBuffer(10); + + //add the columns we are ordering by to the select ID select clause + Iterator iter = orderByTokens.iterator(); + while ( iter.hasNext() ) { + String token = (String) iter.next(); + if ( token.lastIndexOf(".") > 0 ) { + //ie. it is of form "foo.bar", not of form "asc" or "desc" + buf.append(StringHelper.COMMA_SPACE).append(token); + } + } + + return buf.toString(); + }*/ + + private void renderPropertiesSelect(QuerySelect sql) { + int size = returnedTypes.size(); + for ( int k = 0; k < size; k++ ) { + String suffix = size == 1 ? "" : Integer.toString( k ) + '_'; + String name = ( String ) returnedTypes.get( k ); + sql.addSelectFragmentString( persisters[k].propertySelectFragment( name, suffix, false ) ); + } + } + + /** + * WARNING: side-effecty + */ + private String renderScalarSelect() { + + boolean isSubselect = superQuery != null; + + StringBuffer buf = new StringBuffer( 20 ); + + if ( scalarTypes.size() == 0 ) { + //ie. no select clause + int size = returnedTypes.size(); + for ( int k = 0; k < size; k++ ) { + + scalarTypes.add( TypeFactory.manyToOne( persisters[k].getEntityName(), shallowQuery ) ); + + String[] idColumnNames = persisters[k].getIdentifierColumnNames(); + for ( int i = 0; i < idColumnNames.length; i++ ) { + buf.append( returnedTypes.get( k ) ).append( '.' ).append( idColumnNames[i] ); + if ( !isSubselect ) buf.append( " as " ).append( NameGenerator.scalarName( k, i ) ); + if ( i != idColumnNames.length - 1 || k != size - 1 ) buf.append( ", " ); + } + + } + + } + else { + //there _was_ a select clause + Iterator iter = scalarSelectTokens.iterator(); + int c = 0; + boolean nolast = false; //real hacky... 
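// nolast suppresses the " as ..." scalar alias after a String[] fragment,
// whose columns have already been aliased individually in the branch below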
+ int parenCount = 0; // used to count the nesting of parentheses + while ( iter.hasNext() ) { + Object next = iter.next(); + if ( next instanceof String ) { + String token = ( String ) next; + + if ( "(".equals( token ) ) { + parenCount++; + } + else if ( ")".equals( token ) ) { + parenCount--; + } + + String lc = token.toLowerCase(); + if ( lc.equals( ", " ) ) { + if ( nolast ) { + nolast = false; + } + else { + if ( !isSubselect && parenCount == 0 ) { + int x = c++; + buf.append( " as " ) + .append( NameGenerator.scalarName( x, 0 ) ); + } + } + } + buf.append( token ); + if ( lc.equals( "distinct" ) || lc.equals( "all" ) ) { + buf.append( ' ' ); + } + } + else { + nolast = true; + String[] tokens = ( String[] ) next; + for ( int i = 0; i < tokens.length; i++ ) { + buf.append( tokens[i] ); + if ( !isSubselect ) { + buf.append( " as " ) + .append( NameGenerator.scalarName( c, i ) ); + } + if ( i != tokens.length - 1 ) buf.append( ", " ); + } + c++; + } + } + if ( !isSubselect && !nolast ) { + int x = c++; + buf.append( " as " ) + .append( NameGenerator.scalarName( x, 0 ) ); + } + + } + + return buf.toString(); + } + + private void mergeJoins(JoinFragment ojf) throws MappingException, QueryException { + + Iterator iter = joins.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry me = ( Map.Entry ) iter.next(); + String name = ( String ) me.getKey(); + JoinSequence join = ( JoinSequence ) me.getValue(); + join.setSelector( new JoinSequence.Selector() { + public boolean includeSubclasses(String alias) { + boolean include = returnedTypes.contains( alias ) && !isShallowQuery(); + return include; + } + } ); + + if ( typeMap.containsKey( name ) ) { + ojf.addFragment( join.toJoinFragment( enabledFilters, true ) ); + } + else if ( collections.containsKey( name ) ) { + ojf.addFragment( join.toJoinFragment( enabledFilters, true ) ); + } + else { + //name from a super query (a bit inelegant that it shows up here) + } + + } + + } + + public final Set getQuerySpaces() { + return querySpaces; + } + + /** + * Is this query called by scroll() or iterate()? + * + * @return true if it is, false if it is called by find() or list() + */ + boolean isShallowQuery() { + return shallowQuery; + } + + void addQuerySpaces(Serializable[] spaces) { + for ( int i = 0; i < spaces.length; i++ ) { + querySpaces.add( spaces[i] ); + } + if ( superQuery != null ) superQuery.addQuerySpaces( spaces ); + } + + void setDistinct(boolean distinct) { + this.distinct = distinct; + } + + boolean isSubquery() { + return superQuery != null; + } + + /** + * Overrides method from Loader + */ + public CollectionPersister[] getCollectionPersisters() { + return collectionPersister == null ? null : new CollectionPersister[] { collectionPersister }; + } + + protected String[] getCollectionSuffixes() { + return collectionPersister == null ? 
null : new String[] { "__" }; + } + + void setCollectionToFetch(String role, String name, String ownerName, String entityName) + throws QueryException { + fetchName = name; + collectionPersister = getCollectionPersister( role ); + collectionOwnerName = ownerName; + if ( collectionPersister.getElementType().isEntityType() ) { + addEntityToFetch( entityName ); + } + } + + protected String[] getSuffixes() { + return suffixes; + } + + protected String[] getAliases() { + return names; + } + + /** + * Used for collection filters + */ + private void addFromAssociation(final String elementName, final String collectionRole) + throws QueryException { + //q.addCollection(collectionName, collectionRole); + QueryableCollection persister = getCollectionPersister( collectionRole ); + Type collectionElementType = persister.getElementType(); + if ( !collectionElementType.isEntityType() ) { + throw new QueryException( "collection of values in filter: " + elementName ); + } + + String[] keyColumnNames = persister.getKeyColumnNames(); + //if (keyColumnNames.length!=1) throw new QueryException("composite-key collection in filter: " + collectionRole); + + String collectionName; + JoinSequence join = new JoinSequence( getFactory() ); + collectionName = persister.isOneToMany() ? + elementName : + createNameForCollection( collectionRole ); + join.setRoot( persister, collectionName ); + if ( !persister.isOneToMany() ) { + //many-to-many + addCollection( collectionName, collectionRole ); + try { + join.addJoin( ( AssociationType ) persister.getElementType(), + elementName, + JoinFragment.INNER_JOIN, + persister.getElementColumnNames(collectionName) ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + } + join.addCondition( collectionName, keyColumnNames, " = ?" 
); + //if ( persister.hasWhere() ) join.addCondition( persister.getSQLWhereString(collectionName) ); + EntityType elemType = ( EntityType ) collectionElementType; + addFrom( elementName, elemType.getAssociatedEntityName(), join ); + + } + + String getPathAlias(String path) { + return ( String ) pathAliases.get( path ); + } + + JoinSequence getPathJoin(String path) { + return ( JoinSequence ) pathJoins.get( path ); + } + + void addPathAliasAndJoin(String path, String alias, JoinSequence joinSequence) { + pathAliases.put( path, alias ); + pathJoins.put( path, joinSequence ); + } + + public List list(SessionImplementor session, QueryParameters queryParameters) + throws HibernateException { + return list( session, queryParameters, getQuerySpaces(), actualReturnTypes ); + } + + /** + * Return the query results as an iterator + */ + public Iterator iterate(QueryParameters queryParameters, EventSource session) + throws HibernateException { + + boolean stats = session.getFactory().getStatistics().isStatisticsEnabled(); + long startTime = 0; + if ( stats ) startTime = System.currentTimeMillis(); + + try { + + PreparedStatement st = prepareQueryStatement( queryParameters, false, session ); + ResultSet rs = getResultSet( st, queryParameters.hasAutoDiscoverScalarTypes(), false, queryParameters.getRowSelection(), session ); + HolderInstantiator hi = HolderInstantiator.createClassicHolderInstantiator(holderConstructor, queryParameters.getResultTransformer()); + Iterator result = new IteratorImpl( rs, st, session, returnTypes, getColumnNames(), hi ); + + if ( stats ) { + session.getFactory().getStatisticsImplementor().queryExecuted( + "HQL: " + queryString, + 0, + System.currentTimeMillis() - startTime + ); + } + + return result; + + } + catch ( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + getFactory().getSQLExceptionConverter(), + sqle, + "could not execute query using iterate", + getSQLString() + ); + } + + } + + public int executeUpdate(QueryParameters queryParameters, SessionImplementor session) throws HibernateException { + throw new UnsupportedOperationException( "Not supported! Use the AST translator..."); + } + + protected Object getResultColumnOrRow(Object[] row, ResultTransformer transformer, ResultSet rs, SessionImplementor session) + throws SQLException, HibernateException { + row = toResultRow( row ); + if ( hasScalars ) { + String[][] scalarColumns = getColumnNames(); + int queryCols = returnTypes.length; + if ( holderClass == null && queryCols == 1 ) { + return returnTypes[0].nullSafeGet( rs, scalarColumns[0], session, null ); + } + else { + row = new Object[queryCols]; + for ( int i = 0; i < queryCols; i++ ) + row[i] = returnTypes[i].nullSafeGet( rs, scalarColumns[i], session, null ); + return row; + } + } + else if ( holderClass == null ) { + return row.length == 1 ? 
row[0] : row; + } + else { + return row; + } + + } + + protected List getResultList(List results, ResultTransformer resultTransformer) throws QueryException { + if ( holderClass != null ) { + for ( int i = 0; i < results.size(); i++ ) { + Object[] row = ( Object[] ) results.get( i ); + try { + results.set( i, holderConstructor.newInstance( row ) ); + } + catch ( Exception e ) { + throw new QueryException( "could not instantiate: " + holderClass, e ); + } + } + } + return results; + } + + private Object[] toResultRow(Object[] row) { + if ( selectLength == row.length ) { + return row; + } + else { + Object[] result = new Object[selectLength]; + int j = 0; + for ( int i = 0; i < row.length; i++ ) { + if ( includeInSelect[i] ) result[j++] = row[i]; + } + return result; + } + } + + void setHolderClass(Class clazz) { + holderClass = clazz; + } + + protected LockMode[] getLockModes(Map lockModes) { + // unfortunately this stuff can't be cached because + // it is per-invocation, not constant for the + // QueryTranslator instance + HashMap nameLockModes = new HashMap(); + if ( lockModes != null ) { + Iterator iter = lockModes.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry me = ( Map.Entry ) iter.next(); + nameLockModes.put( getAliasName( ( String ) me.getKey() ), + me.getValue() ); + } + } + LockMode[] lockModeArray = new LockMode[names.length]; + for ( int i = 0; i < names.length; i++ ) { + LockMode lm = ( LockMode ) nameLockModes.get( names[i] ); + if ( lm == null ) lm = LockMode.NONE; + lockModeArray[i] = lm; + } + return lockModeArray; + } + + protected String applyLocks(String sql, Map lockModes, Dialect dialect) throws QueryException { + // can't cache this stuff either (per-invocation) + final String result; + if ( lockModes == null || lockModes.size() == 0 ) { + result = sql; + } + else { + Map aliasedLockModes = new HashMap(); + Iterator iter = lockModes.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry me = ( Map.Entry ) iter.next(); + aliasedLockModes.put( getAliasName( ( String ) me.getKey() ), me.getValue() ); + } + Map keyColumnNames = null; + if ( dialect.forUpdateOfColumns() ) { + keyColumnNames = new HashMap(); + for ( int i = 0; i < names.length; i++ ) { + keyColumnNames.put( names[i], persisters[i].getIdentifierColumnNames() ); + } + } + result = dialect.applyLocksToSql( sql, aliasedLockModes, keyColumnNames ); + } + logQuery( queryString, result ); + return result; + } + + protected boolean upgradeLocks() { + return true; + } + + protected int[] getCollectionOwners() { + return new int[] { collectionOwnerColumn }; + } + + protected boolean isCompiled() { + return compiled; + } + + public String toString() { + return queryString; + } + + protected int[] getOwners() { + return owners; + } + + protected EntityType[] getOwnerAssociationTypes() { + return ownerAssociationTypes; + } + + public Class getHolderClass() { + return holderClass; + } + + public Map getEnabledFilters() { + return enabledFilters; + } + + public ScrollableResults scroll(final QueryParameters queryParameters, + final SessionImplementor session) + throws HibernateException { + HolderInstantiator hi = HolderInstantiator.createClassicHolderInstantiator(holderConstructor, queryParameters.getResultTransformer()); + return scroll( queryParameters, returnTypes, hi, session ); + } + + public String getQueryIdentifier() { + return queryIdentifier; + } + + protected boolean isSubselectLoadingEnabled() { + return hasSubselectLoadableCollections(); + } + + public void validateScrollability() 
throws HibernateException { + // This is the legacy behaviour for HQL queries... + if ( getCollectionPersisters() != null ) { + throw new HibernateException( "Cannot scroll queries which initialize collections" ); + } + } + + public boolean containsCollectionFetches() { + return false; + } + + public boolean isManipulationStatement() { + // classic parser does not support bulk manipulation statements + return false; + } + + public ParameterTranslations getParameterTranslations() { + return new ParameterTranslations() { + + public boolean supportsOrdinalParameterMetadata() { + // classic translator does not support collection of ordinal + // param metadata + return false; + } + + public int getOrdinalParameterCount() { + return 0; // not known! + } + + public int getOrdinalParameterSqlLocation(int ordinalPosition) { + return 0; // not known! + } + + public Type getOrdinalParameterExpectedType(int ordinalPosition) { + return null; // not known! + } + + public Set getNamedParameterNames() { + return namedParameters.keySet(); + } + + public int[] getNamedParameterSqlLocations(String name) { + return getNamedParameterLocs( name ); + } + + public Type getNamedParameterExpectedType(String name) { + return null; // not known! + } + }; + } +} diff --git a/src/org/hibernate/hql/classic/SelectParser.java b/src/org/hibernate/hql/classic/SelectParser.java new file mode 100644 index 0000000000..37b3e5899f --- /dev/null +++ b/src/org/hibernate/hql/classic/SelectParser.java @@ -0,0 +1,228 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.Hibernate; +import org.hibernate.QueryException; +import org.hibernate.dialect.function.SQLFunction; +import org.hibernate.hql.QuerySplitter; +import org.hibernate.type.Type; +import org.hibernate.util.ReflectHelper; + +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +/** + * Parsers the select clause of a Hibernate query. + * + * @author Gavin King, David Channon + */ +public class SelectParser implements Parser { + + //TODO: arithmetic expressions, multiple new Foo(...) 
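+	// Editorial sketch, not part of the original source: how this state machine
+	// handles "select new Summary( count(*), max(item.price) )", where Summary is
+	// a hypothetical holder class. "new" sets afterNew, so the next token is
+	// resolved via QuerySplitter/ReflectHelper into holderClass; "count" and "max"
+	// match registered SQL functions and are pushed onto aggregateFuncTokenList;
+	// and the bare "*" is accepted inside count(...) only because it appears in
+	// COUNT_MODIFIERS, declared just below.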
+ + private static final Set COUNT_MODIFIERS = new HashSet(); + + static { + COUNT_MODIFIERS.add( "distinct" ); + COUNT_MODIFIERS.add( "all" ); + COUNT_MODIFIERS.add( "*" ); + } + + private LinkedList aggregateFuncTokenList = new LinkedList(); + + private boolean ready; + private boolean aggregate; + private boolean first; + private boolean afterNew; + private boolean insideNew; + private boolean aggregateAddSelectScalar; + private Class holderClass; + + private final SelectPathExpressionParser pathExpressionParser; + private final PathExpressionParser aggregatePathExpressionParser; + + { + pathExpressionParser = new SelectPathExpressionParser(); + aggregatePathExpressionParser = new PathExpressionParser(); + //TODO: would be nice to use false, but issues with MS SQL + pathExpressionParser.setUseThetaStyleJoin( true ); + aggregatePathExpressionParser.setUseThetaStyleJoin( true ); + } + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + + String lctoken = token.toLowerCase(); + + if ( first ) { + first = false; + if ( "distinct".equals( lctoken ) ) { + q.setDistinct( true ); + return; + } + else if ( "all".equals( lctoken ) ) { + q.setDistinct( false ); + return; + } + } + + if ( afterNew ) { + afterNew = false; + try { + holderClass = ReflectHelper.classForName( QuerySplitter.getImportedClass( token, q.getFactory() ) ); + } + catch ( ClassNotFoundException cnfe ) { + throw new QueryException( cnfe ); + } + if ( holderClass == null ) throw new QueryException( "class not found: " + token ); + q.setHolderClass( holderClass ); + insideNew = true; + } + else if ( token.equals( "," ) ) { + if ( !aggregate && ready ) throw new QueryException( "alias or expression expected in SELECT" ); + q.appendScalarSelectToken( ", " ); + ready = true; + } + else if ( "new".equals( lctoken ) ) { + afterNew = true; + ready = false; + } + else if ( "(".equals( token ) ) { + if ( insideNew && !aggregate && !ready ) { + //opening paren in new Foo ( ... 
) + ready = true; + } + else if ( aggregate ) { + q.appendScalarSelectToken( token ); + } + else { + throw new QueryException( "aggregate function expected before ( in SELECT" ); + } + ready = true; + } + else if ( ")".equals( token ) ) { + if ( insideNew && !aggregate && !ready ) { + //if we are inside a new Result(), but not inside a nested function + insideNew = false; + } + else if ( aggregate && ready ) { + q.appendScalarSelectToken( token ); + aggregateFuncTokenList.removeLast(); + if ( aggregateFuncTokenList.size() < 1 ) { + aggregate = false; + ready = false; + } + } + else { + throw new QueryException( "( expected before ) in select" ); + } + } + else if ( COUNT_MODIFIERS.contains( lctoken ) ) { + if ( !ready || !aggregate ) throw new QueryException( token + " only allowed inside aggregate function in SELECT" ); + q.appendScalarSelectToken( token ); + if ( "*".equals( token ) ) q.addSelectScalar( getFunction( "count", q ).getReturnType( Hibernate.LONG, q.getFactory() ) ); //special case + } + else if ( getFunction( lctoken, q ) != null && token.equals( q.unalias( token ) ) ) { + // the name of an SQL function + if ( !ready ) throw new QueryException( ", expected before aggregate function in SELECT: " + token ); + aggregate = true; + aggregateAddSelectScalar = true; + aggregateFuncTokenList.add( lctoken ); + ready = false; + q.appendScalarSelectToken( token ); + if ( !aggregateHasArgs( lctoken, q ) ) { + q.addSelectScalar( aggregateType( aggregateFuncTokenList, null, q ) ); + if ( !aggregateFuncNoArgsHasParenthesis( lctoken, q ) ) { + aggregateFuncTokenList.removeLast(); + if ( aggregateFuncTokenList.size() < 1 ) { + aggregate = false; + ready = false; + } + else { + ready = true; + } + } + } + } + else if ( aggregate ) { + boolean constantToken = false; + if ( !ready ) throw new QueryException( "( expected after aggregate function in SELECT" ); + try { + ParserHelper.parse( aggregatePathExpressionParser, q.unalias( token ), ParserHelper.PATH_SEPARATORS, q ); + } + catch ( QueryException qex ) { + constantToken = true; + } + + if ( constantToken ) { + q.appendScalarSelectToken( token ); + } + else { + if ( aggregatePathExpressionParser.isCollectionValued() ) { + q.addCollection( aggregatePathExpressionParser.getCollectionName(), + aggregatePathExpressionParser.getCollectionRole() ); + } + q.appendScalarSelectToken( aggregatePathExpressionParser.getWhereColumn() ); + if ( aggregateAddSelectScalar ) { + q.addSelectScalar( aggregateType( aggregateFuncTokenList, aggregatePathExpressionParser.getWhereColumnType(), q ) ); + aggregateAddSelectScalar = false; + } + aggregatePathExpressionParser.addAssociation( q ); + } + } + else { + if ( !ready ) throw new QueryException( ", expected in SELECT" ); + ParserHelper.parse( pathExpressionParser, q.unalias( token ), ParserHelper.PATH_SEPARATORS, q ); + if ( pathExpressionParser.isCollectionValued() ) { + q.addCollection( pathExpressionParser.getCollectionName(), + pathExpressionParser.getCollectionRole() ); + } + else if ( pathExpressionParser.getWhereColumnType().isEntityType() ) { + q.addSelectClass( pathExpressionParser.getSelectName() ); + } + q.appendScalarSelectTokens( pathExpressionParser.getWhereColumns() ); + q.addSelectScalar( pathExpressionParser.getWhereColumnType() ); + pathExpressionParser.addAssociation( q ); + + ready = false; + } + } + + public boolean aggregateHasArgs(String funcToken, QueryTranslatorImpl q) { + return getFunction( funcToken, q ).hasArguments(); + } + + public boolean aggregateFuncNoArgsHasParenthesis(String 
funcToken, QueryTranslatorImpl q) { + return getFunction( funcToken, q ).hasParenthesesIfNoArguments(); + } + + public Type aggregateType(List funcTokenList, Type type, QueryTranslatorImpl q) throws QueryException { + Type retType = type; + Type argType; + for ( int i = funcTokenList.size() - 1; i >= 0; i-- ) { + argType = retType; + String funcToken = ( String ) funcTokenList.get( i ); + retType = getFunction( funcToken, q ).getReturnType( argType, q.getFactory() ); + } + return retType; + } + + private SQLFunction getFunction(String name, QueryTranslatorImpl q) { + return q.getFactory().getSqlFunctionRegistry().findSQLFunction( name ); + } + + public void start(QueryTranslatorImpl q) { + ready = true; + first = true; + aggregate = false; + afterNew = false; + insideNew = false; + holderClass = null; + aggregateFuncTokenList.clear(); + } + + public void end(QueryTranslatorImpl q) { + } + +} diff --git a/src/org/hibernate/hql/classic/SelectPathExpressionParser.java b/src/org/hibernate/hql/classic/SelectPathExpressionParser.java new file mode 100644 index 0000000000..dde7435110 --- /dev/null +++ b/src/org/hibernate/hql/classic/SelectPathExpressionParser.java @@ -0,0 +1,31 @@ +//$Id$ +package org.hibernate.hql.classic; + +import org.hibernate.QueryException; + +public class SelectPathExpressionParser extends PathExpressionParser { + + public void end(QueryTranslatorImpl q) throws QueryException { + if ( getCurrentProperty() != null && !q.isShallowQuery() ) { + // "finish off" the join + token( ".", q ); + token( null, q ); + } + super.end( q ); + } + + protected void setExpectingCollectionIndex() throws QueryException { + throw new QueryException( "illegal syntax near collection-valued path expression in select: " + getCollectionName() ); + } + + public String getSelectName() { + return getCurrentName(); + } +} + + + + + + + diff --git a/src/org/hibernate/hql/classic/WhereParser.java b/src/org/hibernate/hql/classic/WhereParser.java new file mode 100644 index 0000000000..d468af13d3 --- /dev/null +++ b/src/org/hibernate/hql/classic/WhereParser.java @@ -0,0 +1,493 @@ +//$Id$ +package org.hibernate.hql.classic; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Map; +import java.util.Set; +import java.util.StringTokenizer; + +import org.hibernate.MappingException; +import org.hibernate.QueryException; +import org.hibernate.engine.JoinSequence; +import org.hibernate.hql.QueryTranslator; +import org.hibernate.persister.collection.CollectionPropertyNames; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.InFragment; +import org.hibernate.type.EntityType; +import org.hibernate.type.LiteralType; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +/** + * Parses the where clause of a hibernate query and translates it to an + * SQL where clause. + */ + +// We should reengineer this class so that, rather than the current ad - +// hoc linear approach to processing a stream of tokens, we instead +// build up a tree of expressions. 
+ +// We would probably refactor to have LogicParser (builds a tree of simple +// expressions connected by and, or, not), ExpressionParser (translates +// from OO terms like foo, foo.Bar, foo.Bar.Baz to SQL terms like +// FOOS.ID, FOOS.BAR_ID, etc) and PathExpressionParser (which does much +// the same thing it does now) + +public class WhereParser implements Parser { + + private final PathExpressionParser pathExpressionParser; + + { + pathExpressionParser = new PathExpressionParser(); + pathExpressionParser.setUseThetaStyleJoin( true ); //Need this, since join condition can appear inside parens! + } + + private static final Set EXPRESSION_TERMINATORS = new HashSet(); //tokens that close a sub expression + private static final Set EXPRESSION_OPENERS = new HashSet(); //tokens that open a sub expression + private static final Set BOOLEAN_OPERATORS = new HashSet(); //tokens that would indicate a sub expression is a boolean expression + private static final Map NEGATIONS = new HashMap(); + + static { + EXPRESSION_TERMINATORS.add( "and" ); + EXPRESSION_TERMINATORS.add( "or" ); + EXPRESSION_TERMINATORS.add( ")" ); + //expressionTerminators.add(","); // deliberately excluded + + EXPRESSION_OPENERS.add( "and" ); + EXPRESSION_OPENERS.add( "or" ); + EXPRESSION_OPENERS.add( "(" ); + //expressionOpeners.add(","); // deliberately excluded + + BOOLEAN_OPERATORS.add( "<" ); + BOOLEAN_OPERATORS.add( "=" ); + BOOLEAN_OPERATORS.add( ">" ); + BOOLEAN_OPERATORS.add( "#" ); + BOOLEAN_OPERATORS.add( "~" ); + BOOLEAN_OPERATORS.add( "like" ); + BOOLEAN_OPERATORS.add( "ilike" ); + BOOLEAN_OPERATORS.add( "regexp" ); + BOOLEAN_OPERATORS.add( "rlike" ); + BOOLEAN_OPERATORS.add( "is" ); + BOOLEAN_OPERATORS.add( "in" ); + BOOLEAN_OPERATORS.add( "any" ); + BOOLEAN_OPERATORS.add( "some" ); + BOOLEAN_OPERATORS.add( "all" ); + BOOLEAN_OPERATORS.add( "exists" ); + BOOLEAN_OPERATORS.add( "between" ); + BOOLEAN_OPERATORS.add( "<=" ); + BOOLEAN_OPERATORS.add( ">=" ); + BOOLEAN_OPERATORS.add( "=>" ); + BOOLEAN_OPERATORS.add( "=<" ); + BOOLEAN_OPERATORS.add( "!=" ); + BOOLEAN_OPERATORS.add( "<>" ); + BOOLEAN_OPERATORS.add( "!#" ); + BOOLEAN_OPERATORS.add( "!~" ); + BOOLEAN_OPERATORS.add( "!<" ); + BOOLEAN_OPERATORS.add( "!>" ); + BOOLEAN_OPERATORS.add( "is not" ); + BOOLEAN_OPERATORS.add( "not like" ); + BOOLEAN_OPERATORS.add( "not ilike" ); + BOOLEAN_OPERATORS.add( "not regexp" ); + BOOLEAN_OPERATORS.add( "not rlike" ); + BOOLEAN_OPERATORS.add( "not in" ); + BOOLEAN_OPERATORS.add( "not between" ); + BOOLEAN_OPERATORS.add( "not exists" ); + + NEGATIONS.put( "and", "or" ); + NEGATIONS.put( "or", "and" ); + NEGATIONS.put( "<", ">=" ); + NEGATIONS.put( "=", "<>" ); + NEGATIONS.put( ">", "<=" ); + NEGATIONS.put( "#", "!#" ); + NEGATIONS.put( "~", "!~" ); + NEGATIONS.put( "like", "not like" ); + NEGATIONS.put( "ilike", "not ilike" ); + NEGATIONS.put( "regexp", "not regexp" ); + NEGATIONS.put( "rlike", "not rlike" ); + NEGATIONS.put( "is", "is not" ); + NEGATIONS.put( "in", "not in" ); + NEGATIONS.put( "exists", "not exists" ); + NEGATIONS.put( "between", "not between" ); + NEGATIONS.put( "<=", ">" ); + NEGATIONS.put( ">=", "<" ); + NEGATIONS.put( "=>", "<" ); + NEGATIONS.put( "=<", ">" ); + NEGATIONS.put( "!=", "=" ); + NEGATIONS.put( "<>", "=" ); + NEGATIONS.put( "!#", "#" ); + NEGATIONS.put( "!~", "~" ); + NEGATIONS.put( "!<", "<" ); + NEGATIONS.put( "!>", ">" ); + NEGATIONS.put( "is not", "is" ); + NEGATIONS.put( "not like", "like" ); + NEGATIONS.put( "not ilike", "ilike" ); + NEGATIONS.put( "not regexp", "regexp" ); + 
NEGATIONS.put( "not rlike", "rlike" ); + NEGATIONS.put( "not in", "in" ); + NEGATIONS.put( "not between", "between" ); + NEGATIONS.put( "not exists", "exists" ); + + } + // Handles things like: + // a and b or c + // a and ( b or c ) + // not a and not b + // not ( a and b ) + // x between y and z (overloaded "and") + // x in ( a, b, c ) (overloaded brackets) + // not not a + // a is not null (overloaded "not") + // etc...... + // and expressions like + // foo = bar (maps to: foo.id = bar.id) + // foo.Bar = 'foo' (maps to: foo.bar = 'foo') + // foo.Bar.Baz = 1.0 (maps to: foo.bar = bar.id and bar.baz = 1.0) + // 1.0 = foo.Bar.Baz (maps to: bar.baz = 1.0 and foo.Bar = bar.id) + // foo.Bar.Baz = a.B.C (maps to: bar.Baz = b.C and foo.Bar = bar.id and a.B = b.id) + // foo.Bar.Baz + a.B.C (maps to: bar.Baz + b.C and foo.Bar = bar.id and a.B = b.id) + // ( foo.Bar.Baz + 1.0 ) < 2.0 (maps to: ( bar.Baz + 1.0 ) < 2.0 and foo.Bar = bar.id) + + private boolean betweenSpecialCase = false; //Inside a BETWEEN ... AND ... expression + private boolean negated = false; + + private boolean inSubselect = false; + private int bracketsSinceSelect = 0; + private StringBuffer subselect; + + private boolean expectingPathContinuation = false; + private int expectingIndex = 0; + + // The following variables are stacks that keep information about each subexpression + // in the list of nested subexpressions we are currently processing. + + private LinkedList nots = new LinkedList(); //were an odd or even number of NOTs encountered + private LinkedList joins = new LinkedList(); //the join string built up by compound paths inside this expression + private LinkedList booleanTests = new LinkedList(); //a flag indicating if the subexpression is known to be boolean + + private String getElementName(PathExpressionParser.CollectionElement element, QueryTranslatorImpl q) throws QueryException { + String name; + if ( element.isOneToMany ) { + name = element.alias; + } + else { + Type type = element.elementType; + if ( type.isEntityType() ) { //ie. a many-to-many + String entityName = ( ( EntityType ) type ).getAssociatedEntityName(); + name = pathExpressionParser.continueFromManyToMany( entityName, element.elementColumns, q ); + } + else { + throw new QueryException( "illegally dereferenced collection element" ); + } + } + return name; + } + + public void token(String token, QueryTranslatorImpl q) throws QueryException { + + String lcToken = token.toLowerCase(); + + //Cope with [,] + if ( token.equals( "[" ) && !expectingPathContinuation ) { + expectingPathContinuation = false; + if ( expectingIndex == 0 ) throw new QueryException( "unexpected [" ); + return; + } + else if ( token.equals( "]" ) ) { + expectingIndex--; + expectingPathContinuation = true; + return; + } + + //Cope with a continued path expression (ie. 
].baz) + if ( expectingPathContinuation ) { + boolean pathExpressionContinuesFurther = continuePathExpression( token, q ); + if ( pathExpressionContinuesFurther ) return; //NOTE: early return + } + + //Cope with a subselect + if ( !inSubselect && ( lcToken.equals( "select" ) || lcToken.equals( "from" ) ) ) { + inSubselect = true; + subselect = new StringBuffer( 20 ); + } + if ( inSubselect && token.equals( ")" ) ) { + bracketsSinceSelect--; + + if ( bracketsSinceSelect == -1 ) { + QueryTranslatorImpl subq = new QueryTranslatorImpl( + subselect.toString(), + q.getEnabledFilters(), + q.getFactory() + ); + try { + subq.compile( q ); + } + catch ( MappingException me ) { + throw new QueryException( "MappingException occurred compiling subquery", me ); + } + appendToken( q, subq.getSQLString() ); + inSubselect = false; + bracketsSinceSelect = 0; + } + } + if ( inSubselect ) { + if ( token.equals( "(" ) ) bracketsSinceSelect++; + subselect.append( token ).append( ' ' ); + return; + } + + //Cope with special cases of AND, NOT, () + specialCasesBefore( lcToken ); + + //Close extra brackets we opened + if ( !betweenSpecialCase && EXPRESSION_TERMINATORS.contains( lcToken ) ) { + closeExpression( q, lcToken ); + } + + //take note when this is a boolean expression + if ( BOOLEAN_OPERATORS.contains( lcToken ) ) { + booleanTests.removeLast(); + booleanTests.addLast( Boolean.TRUE ); + } + + if ( lcToken.equals( "not" ) ) { + nots.addLast( new Boolean( !( ( Boolean ) nots.removeLast() ).booleanValue() ) ); + negated = !negated; + return; //NOTE: early return + } + + //process a token, mapping OO path expressions to SQL expressions + doToken( token, q ); + + //Open any extra brackets we might need. + if ( !betweenSpecialCase && EXPRESSION_OPENERS.contains( lcToken ) ) { + openExpression( q, lcToken ); + } + + //Cope with special cases of AND, NOT, ) + specialCasesAfter( lcToken ); + + } + + public void start(QueryTranslatorImpl q) throws QueryException { + token( "(", q ); + } + + public void end(QueryTranslatorImpl q) throws QueryException { + if ( expectingPathContinuation ) { + expectingPathContinuation = false; + PathExpressionParser.CollectionElement element = pathExpressionParser.lastCollectionElement(); + if ( element.elementColumns.length != 1 ) throw new QueryException( "path expression ended in composite collection element" ); + appendToken( q, element.elementColumns[0] ); + addToCurrentJoin( element ); + } + token( ")", q ); + } + + private void closeExpression(QueryTranslatorImpl q, String lcToken) { + if ( ( ( Boolean ) booleanTests.removeLast() ).booleanValue() ) { //it was a boolean expression + + if ( booleanTests.size() > 0 ) { + // the next one up must also be + booleanTests.removeLast(); + booleanTests.addLast( Boolean.TRUE ); + } + + // Add any joins + appendToken( q, ( joins.removeLast() ).toString() ); + + } + else { + StringBuffer join = ( StringBuffer ) joins.removeLast(); + ( ( StringBuffer ) joins.getLast() ).append( join.toString() ); + } + + if ( ( ( Boolean ) nots.removeLast() ).booleanValue() ) negated = !negated; + + if ( !")".equals( lcToken ) ) appendToken( q, ")" ); + } + + private void openExpression(QueryTranslatorImpl q, String lcToken) { + nots.addLast( Boolean.FALSE ); + booleanTests.addLast( Boolean.FALSE ); + joins.addLast( new StringBuffer() ); + if ( !"(".equals( lcToken ) ) appendToken( q, "(" ); + } + + private void preprocess(String token, QueryTranslatorImpl q) throws QueryException { + // ugly hack for cases like "elements(foo.bar.collection)" + // 
(multi-part path expression ending in elements or indices) + String[] tokens = StringHelper.split( ".", token, true ); + if ( + tokens.length > 5 && + ( CollectionPropertyNames.COLLECTION_ELEMENTS.equals( tokens[tokens.length - 1] ) + || CollectionPropertyNames.COLLECTION_INDICES.equals( tokens[tokens.length - 1] ) ) + ) { + pathExpressionParser.start( q ); + for ( int i = 0; i < tokens.length - 3; i++ ) { + pathExpressionParser.token( tokens[i], q ); + } + pathExpressionParser.token( null, q ); + pathExpressionParser.end( q ); + addJoin( pathExpressionParser.getWhereJoin(), q ); + pathExpressionParser.ignoreInitialJoin(); + } + } + + private void doPathExpression(String token, QueryTranslatorImpl q) throws QueryException { + + preprocess( token, q ); + + StringTokenizer tokens = new StringTokenizer( token, ".", true ); + pathExpressionParser.start( q ); + while ( tokens.hasMoreTokens() ) { + pathExpressionParser.token( tokens.nextToken(), q ); + } + pathExpressionParser.end( q ); + if ( pathExpressionParser.isCollectionValued() ) { + openExpression( q, "" ); + appendToken( q, pathExpressionParser.getCollectionSubquery( q.getEnabledFilters() ) ); + closeExpression( q, "" ); + // this is ugly here, but needed because its a subquery + q.addQuerySpaces( q.getCollectionPersister( pathExpressionParser.getCollectionRole() ).getCollectionSpaces() ); + } + else { + if ( pathExpressionParser.isExpectingCollectionIndex() ) { + expectingIndex++; + } + else { + addJoin( pathExpressionParser.getWhereJoin(), q ); + appendToken( q, pathExpressionParser.getWhereColumn() ); + } + } + } + + private void addJoin(JoinSequence joinSequence, QueryTranslatorImpl q) throws QueryException { + //JoinFragment fromClause = q.createJoinFragment(true); + //fromClause.addJoins( join.toJoinFragment().toFromFragmentString(), StringHelper.EMPTY_STRING ); + q.addFromJoinOnly( pathExpressionParser.getName(), joinSequence ); + try { + addToCurrentJoin( joinSequence.toJoinFragment( q.getEnabledFilters(), true ).toWhereFragmentString() ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + } + + private void doToken(String token, QueryTranslatorImpl q) throws QueryException { + if ( q.isName( StringHelper.root( token ) ) ) { //path expression + doPathExpression( q.unalias( token ), q ); + } + else if ( token.startsWith( ParserHelper.HQL_VARIABLE_PREFIX ) ) { //named query parameter + q.addNamedParameter( token.substring( 1 ) ); + appendToken( q, "?" ); + } + else { + Queryable persister = q.getEntityPersisterUsingImports( token ); + if ( persister != null ) { // the name of a class + final String discrim = persister.getDiscriminatorSQLValue(); + if ( InFragment.NULL.equals(discrim) || InFragment.NOT_NULL.equals(discrim) ) { + throw new QueryException( "subclass test not allowed for null or not null discriminator" ); + } + else { + appendToken( q, discrim ); + } + } + else { + Object constant; + if ( + token.indexOf( '.' 
) > -1 && + ( constant = ReflectHelper.getConstantValue( token ) ) != null + ) { + Type type; + try { + type = TypeFactory.heuristicType( constant.getClass().getName() ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + if ( type == null ) throw new QueryException( QueryTranslator.ERROR_CANNOT_DETERMINE_TYPE + token ); + try { + appendToken( q, ( ( LiteralType ) type ).objectToSQLString( constant, q.getFactory().getDialect() ) ); + } + catch ( Exception e ) { + throw new QueryException( QueryTranslator.ERROR_CANNOT_FORMAT_LITERAL + token, e ); + } + } + else { //anything else + + String negatedToken = negated ? ( String ) NEGATIONS.get( token.toLowerCase() ) : null; + if ( negatedToken != null && ( !betweenSpecialCase || !"or".equals( negatedToken ) ) ) { + appendToken( q, negatedToken ); + } + else { + appendToken( q, token ); + } + } + } + } + } + + private void addToCurrentJoin(String sql) { + ( ( StringBuffer ) joins.getLast() ).append( sql ); + } + + private void addToCurrentJoin(PathExpressionParser.CollectionElement ce) + throws QueryException { + try { + addToCurrentJoin( ce.joinSequence.toJoinFragment().toWhereFragmentString() + ce.indexValue.toString() ); + } + catch ( MappingException me ) { + throw new QueryException( me ); + } + } + + private void specialCasesBefore(String lcToken) { + if ( lcToken.equals( "between" ) || lcToken.equals( "not between" ) ) { + betweenSpecialCase = true; + } + } + + private void specialCasesAfter(String lcToken) { + if ( betweenSpecialCase && lcToken.equals( "and" ) ) { + betweenSpecialCase = false; + } + } + + void appendToken(QueryTranslatorImpl q, String token) { + if ( expectingIndex > 0 ) { + pathExpressionParser.setLastCollectionElementIndexValue( token ); + } + else { + q.appendWhereToken( token ); + } + } + + private boolean continuePathExpression(String token, QueryTranslatorImpl q) throws QueryException { + + expectingPathContinuation = false; + + PathExpressionParser.CollectionElement element = pathExpressionParser.lastCollectionElement(); + + if ( token.startsWith( "." ) ) { // the path expression continues after a ] + + doPathExpression( getElementName( element, q ) + token, q ); // careful with this! + + addToCurrentJoin( element ); + return true; //NOTE: EARLY EXIT! + + } + + else { // the path expression ends at the ] + if ( element.elementColumns.length != 1 ) { + throw new QueryException( "path expression ended in composite collection element" ); + } + appendToken( q, element.elementColumns[0] ); + addToCurrentJoin( element ); + return false; + } + } +} diff --git a/src/org/hibernate/hql/classic/package.html b/src/org/hibernate/hql/classic/package.html new file mode 100755 index 0000000000..90e9eef765 --- /dev/null +++ b/src/org/hibernate/hql/classic/package.html @@ -0,0 +1,9 @@ + + + +

    + This package contains the Hibernate 2.x query parser which + is being end-of-lifed. +

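A note on the WhereParser above: rather than wrapping sub-expressions in SQL NOT, it folds an enclosing "not" into each operator through its NEGATIONS table (and tracks odd/even "not" nesting in the nots stack). A minimal standalone sketch of that technique (class and method names here are illustrative, not Hibernate API):

    import java.util.HashMap;
    import java.util.Map;

    public class NegationSketch {

        private static final Map NEGATIONS = new HashMap();
        static {
            NEGATIONS.put( "=", "<>" );
            NEGATIONS.put( "<", ">=" );
            NEGATIONS.put( "like", "not like" );
            NEGATIONS.put( "in", "not in" );
        }

        // Emit an operator, folding in a pending NOT the same way
        // WhereParser.doToken() applies its negatedToken lookup.
        static String emit(String token, boolean negated) {
            String negatedToken = negated ? (String) NEGATIONS.get( token.toLowerCase() ) : null;
            return negatedToken != null ? negatedToken : token;
        }

        public static void main(String[] args) {
            System.out.println( emit( "=", true ) );    // prints: <>
            System.out.println( emit( "like", true ) ); // prints: not like
            System.out.println( emit( ">", false ) );   // prints: >
        }
    }

Because the table also maps "and" to "or" and back, De Morgan's law falls out of the same lookup when a negated boolean connective is emitted.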
    + + diff --git a/src/org/hibernate/hql/package.html b/src/org/hibernate/hql/package.html new file mode 100755 index 0000000000..442cfea20c --- /dev/null +++ b/src/org/hibernate/hql/package.html @@ -0,0 +1,10 @@ + + + +

    + This package defines the interface between Hibernate and + the HQL query parser implementation (to allow switching + between the 2.x and 3.0 HQL parsers). +

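The switch the package description refers to is driven, in Hibernate 3, by the hibernate.query.factory_class configuration property. A sketch of selecting either translator (the property name and factory class names are as shipped in Hibernate 3; verify them against your exact version):

    import org.hibernate.cfg.Configuration;

    public class QueryParserSelection {
        public static void main(String[] args) {
            Configuration cfg = new Configuration();
            // Default in Hibernate 3: the ANTLR-based AST translator.
            cfg.setProperty(
                    "hibernate.query.factory_class",
                    "org.hibernate.hql.ast.ASTQueryTranslatorFactory" );
            // Fall back to the end-of-lifed 2.x parser added in this diff:
            // cfg.setProperty(
            //         "hibernate.query.factory_class",
            //         "org.hibernate.hql.classic.ClassicQueryTranslatorFactory" );
        }
    }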
    + + diff --git a/src/org/hibernate/id/AbstractPostInsertGenerator.java b/src/org/hibernate/id/AbstractPostInsertGenerator.java new file mode 100755 index 0000000000..695b4ba5d5 --- /dev/null +++ b/src/org/hibernate/id/AbstractPostInsertGenerator.java @@ -0,0 +1,15 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; + +import org.hibernate.engine.SessionImplementor; + +/** + * @author Gavin King + */ +public abstract class AbstractPostInsertGenerator implements PostInsertIdentifierGenerator{ + public Serializable generate(SessionImplementor s, Object obj) { + return IdentifierGeneratorFactory.POST_INSERT_INDICATOR; + } +} diff --git a/src/org/hibernate/id/AbstractUUIDGenerator.java b/src/org/hibernate/id/AbstractUUIDGenerator.java new file mode 100644 index 0000000000..fa43a149e7 --- /dev/null +++ b/src/org/hibernate/id/AbstractUUIDGenerator.java @@ -0,0 +1,77 @@ +//$Id$ +package org.hibernate.id; + +import java.net.InetAddress; +import org.hibernate.util.BytesHelper; + +/** + * The base class for identifier generators that use a UUID algorithm. This + * class implements the algorithm, subclasses define the identifier + * format. + * + * @see UUIDHexGenerator + * @author Gavin King + */ + +public abstract class AbstractUUIDGenerator implements IdentifierGenerator { + + private static final int IP; + static { + int ipadd; + try { + ipadd = BytesHelper.toInt( InetAddress.getLocalHost().getAddress() ); + } + catch (Exception e) { + ipadd = 0; + } + IP = ipadd; + } + private static short counter = (short) 0; + private static final int JVM = (int) ( System.currentTimeMillis() >>> 8 ); + + public AbstractUUIDGenerator() { + } + + /** + * Unique across JVMs on this machine (unless they load this class + * in the same quater second - very unlikely) + */ + protected int getJVM() { + return JVM; + } + + /** + * Unique in a millisecond for this JVM instance (unless there + * are > Short.MAX_VALUE instances created in a millisecond) + */ + protected short getCount() { + synchronized(AbstractUUIDGenerator.class) { + if (counter<0) counter=0; + return counter++; + } + } + + /** + * Unique in a local network + */ + protected int getIP() { + return IP; + } + + /** + * Unique down to millisecond + */ + protected short getHiTime() { + return (short) ( System.currentTimeMillis() >>> 32 ); + } + protected int getLoTime() { + return (int) System.currentTimeMillis(); + } + + +} + + + + + diff --git a/src/org/hibernate/id/Assigned.java b/src/org/hibernate/id/Assigned.java new file mode 100644 index 0000000000..21b2325b00 --- /dev/null +++ b/src/org/hibernate/id/Assigned.java @@ -0,0 +1,55 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.util.Properties; + +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.type.Type; + +/** + * assigned
    + *
    + * An IdentifierGenerator that returns the current identifier assigned + * to an instance. + * + * @author Gavin King + */ + +public class Assigned implements IdentifierGenerator, Configurable { + + private String entityName; + + public Serializable generate(SessionImplementor session, Object obj) throws HibernateException { + + final Serializable id = session.getEntityPersister( entityName, obj ) + //TODO: cache the persister, this shows up in yourkit + .getIdentifier( obj, session.getEntityMode() ); + + if (id==null) { + throw new IdentifierGenerationException( + "ids for this class must be manually assigned before calling save(): " + + entityName + ); + } + + return id; + } + + public void configure(Type type, Properties params, Dialect d) + throws MappingException { + entityName = params.getProperty(ENTITY_NAME); + if (entityName==null) { + throw new MappingException("no entity name"); + } + } +} + + + + + + diff --git a/src/org/hibernate/id/Configurable.java b/src/org/hibernate/id/Configurable.java new file mode 100644 index 0000000000..1bf8341934 --- /dev/null +++ b/src/org/hibernate/id/Configurable.java @@ -0,0 +1,27 @@ +//$Id$ +package org.hibernate.id; + +import java.util.Properties; + +import org.hibernate.MappingException; +import org.hibernate.dialect.Dialect; +import org.hibernate.type.Type; + +/** + * An IdentifierGenerator that supports "configuration". + * + * @see IdentifierGenerator + * @author Gavin King + */ +public interface Configurable { + + /** + * Configure this instance, given the value of parameters + * specified by the user as <param> elements. + * This method is called just once, following instantiation. + * + * @param params param values, keyed by parameter name + */ + public void configure(Type type, Properties params, Dialect d) throws MappingException; + +} diff --git a/src/org/hibernate/id/ForeignGenerator.java b/src/org/hibernate/id/ForeignGenerator.java new file mode 100644 index 0000000000..f505cd051f --- /dev/null +++ b/src/org/hibernate/id/ForeignGenerator.java @@ -0,0 +1,87 @@ +package org.hibernate.id; + +import java.io.Serializable; +import java.util.Properties; + +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.Session; +import org.hibernate.TransientObjectException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.ForeignKeys; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; + +/** + * foreign
    + *
+ * An IdentifierGenerator that uses the value of the id property of an + * associated object.
    + *
    + * One mapping parameter is required: property. + * + * @author Gavin King + */ +public class ForeignGenerator implements IdentifierGenerator, Configurable { + + private String propertyName; + private String entityName; + + /** + * @see org.hibernate.id.IdentifierGenerator#generate(org.hibernate.engine.SessionImplementor, java.lang.Object) + */ + public Serializable generate(SessionImplementor sessionImplementor, Object object) + throws HibernateException { + + Session session = (Session) sessionImplementor; + + Object associatedObject = sessionImplementor.getFactory() + .getClassMetadata( entityName ) + .getPropertyValue( object, propertyName, session.getEntityMode() ); + + if ( associatedObject == null ) { + throw new IdentifierGenerationException( + "attempted to assign id from null one-to-one property: " + + propertyName + ); + } + + EntityType type = (EntityType) sessionImplementor.getFactory() + .getClassMetadata( entityName ) + .getPropertyType( propertyName ); + + Serializable id; + try { + id = ForeignKeys.getEntityIdentifierIfNotUnsaved( + type.getAssociatedEntityName(), + associatedObject, + sessionImplementor + ); + } + catch (TransientObjectException toe) { + id = session.save( type.getAssociatedEntityName(), associatedObject ); + } + + if ( session.contains(object) ) { + //abort the save (the object is already saved by a circular cascade) + return IdentifierGeneratorFactory.SHORT_CIRCUIT_INDICATOR; + //throw new IdentifierGenerationException("save associated object first, or disable cascade for inverse association"); + } + return id; + } + + /** + * @see org.hibernate.id.Configurable#configure(org.hibernate.type.Type, java.util.Properties, org.hibernate.dialect.Dialect) + */ + public void configure(Type type, Properties params, Dialect d) + throws MappingException { + + propertyName = params.getProperty("property"); + entityName = params.getProperty(ENTITY_NAME); + if (propertyName==null) throw new MappingException( + "param named \"property\" is required for foreign id generation strategy" + ); + } + +} diff --git a/src/org/hibernate/id/GUIDGenerator.java b/src/org/hibernate/id/GUIDGenerator.java new file mode 100644 index 0000000000..1cffcf2eef --- /dev/null +++ b/src/org/hibernate/id/GUIDGenerator.java @@ -0,0 +1,57 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; + +/** + * Generates string values using the SQL Server NEWID() function. 
+ * + * @author Joseph Fifield + */ +public class GUIDGenerator implements IdentifierGenerator { + + private static final Log log = LogFactory.getLog(GUIDGenerator.class); + + public Serializable generate(SessionImplementor session, Object obj) + throws HibernateException { + + final String sql = session.getFactory().getDialect().getSelectGUIDString(); + try { + PreparedStatement st = session.getBatcher().prepareSelectStatement(sql); + try { + ResultSet rs = st.executeQuery(); + final String result; + try { + rs.next(); + result = rs.getString(1); + } + finally { + rs.close(); + } + log.debug("GUID identifier generated: " + result); + return result; + } + finally { + session.getBatcher().closeStatement(st); + } + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not retrieve GUID", + sql + ); + } + } + +} diff --git a/src/org/hibernate/id/IdentifierGenerationException.java b/src/org/hibernate/id/IdentifierGenerationException.java new file mode 100644 index 0000000000..1f56a63c50 --- /dev/null +++ b/src/org/hibernate/id/IdentifierGenerationException.java @@ -0,0 +1,30 @@ +//$Id$ +package org.hibernate.id; + +import org.hibernate.HibernateException; + +/** + * Thrown by IdentifierGenerator implementation class when + * ID generation fails. + * + * @see IdentifierGenerator + * @author Gavin King + */ + +public class IdentifierGenerationException extends HibernateException { + + public IdentifierGenerationException(String msg) { + super(msg); + } + + public IdentifierGenerationException(String msg, Throwable t) { + super(msg, t); + } + +} + + + + + + diff --git a/src/org/hibernate/id/IdentifierGenerator.java b/src/org/hibernate/id/IdentifierGenerator.java new file mode 100644 index 0000000000..fb02270c3a --- /dev/null +++ b/src/org/hibernate/id/IdentifierGenerator.java @@ -0,0 +1,45 @@ +//$Id$ +package org.hibernate.id; + +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; + +import java.io.Serializable; + +/** + * The general contract between a class that generates unique + * identifiers and the Session. It is not intended that + * this interface ever be exposed to the application. It is + * intended that users implement this interface to provide + * custom identifier generation strategies.
    + *
    + * Implementors should provide a public default constructor.
    + *
    + * Implementations that accept configuration parameters should + * also implement Configurable. + *
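+ * For instance, a deliberately trivial (and editorial, not from the original
+ * source) implementation might be:
+ *
+ *   public class TimestampGenerator implements IdentifierGenerator {
+ *       public Serializable generate(SessionImplementor session, Object object) {
+ *           return new Long( System.currentTimeMillis() ); // not collision-safe
+ *       }
+ *   }
+ *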
    + * Implementors must be threadsafe + * + * @author Gavin King + * @see PersistentIdentifierGenerator + * @see Configurable + */ +public interface IdentifierGenerator { + + /** + * The configuration parameter holding the entity name + */ + public static final String ENTITY_NAME = "entity_name"; + + /** + * Generate a new identifier. + * @param session + * @param object the entity or toplevel collection for which the id is being generated + * + * @return a new identifier + * @throws HibernateException + */ + public Serializable generate(SessionImplementor session, Object object) + throws HibernateException; + +} diff --git a/src/org/hibernate/id/IdentifierGeneratorFactory.java b/src/org/hibernate/id/IdentifierGeneratorFactory.java new file mode 100644 index 0000000000..071ff04f0b --- /dev/null +++ b/src/org/hibernate/id/IdentifierGeneratorFactory.java @@ -0,0 +1,131 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.dialect.Dialect; +import org.hibernate.type.Type; +import org.hibernate.util.ReflectHelper; + +/** + * Factory and helper methods for IdentifierGenerator framework. + * + * @author Gavin King + */ +public final class IdentifierGeneratorFactory { + + private static final Log log = LogFactory.getLog(IdentifierGeneratorFactory.class); + + /** + * Get the generated identifier when using identity columns + */ + public static Serializable getGeneratedIdentity(ResultSet rs, Type type) + throws SQLException, HibernateException, IdentifierGenerationException { + if ( !rs.next() ) { + throw new HibernateException( "The database returned no natively generated identity value" ); + } + final Serializable id = IdentifierGeneratorFactory.get( rs, type ); + + if ( log.isDebugEnabled() ) log.debug( "Natively generated identity: " + id ); + return id; + } + + // unhappy about this being public ... is there a better way? 
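+	// Editorial illustration (not in the original source): external callers use
+	// it against a ResultSet already positioned on the key row, e.g.
+	//     rs.next();
+	//     Serializable id = IdentifierGeneratorFactory.get( rs, Hibernate.LONG );
+	// which, per the dispatch below, wraps column 1 as a java.lang.Long.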
+ public static Serializable get(ResultSet rs, Type type) + throws SQLException, IdentifierGenerationException { + + Class clazz = type.getReturnedClass(); + if ( clazz==Long.class ) { + return new Long( rs.getLong(1) ); + } + else if ( clazz==Integer.class ) { + return new Integer( rs.getInt(1) ); + } + else if ( clazz==Short.class ) { + return new Short( rs.getShort(1) ); + } + else if ( clazz==String.class ) { + return rs.getString(1); + } + else { + throw new IdentifierGenerationException("this id generator generates long, integer, short or string"); + } + + } + + private static final HashMap GENERATORS = new HashMap(); + + public static final Serializable SHORT_CIRCUIT_INDICATOR = new Serializable() { + public String toString() { return "SHORT_CIRCUIT_INDICATOR"; } + }; + + public static final Serializable POST_INSERT_INDICATOR = new Serializable() { + public String toString() { return "POST_INSERT_INDICATOR"; } + }; + + static { + GENERATORS.put("uuid", UUIDHexGenerator.class); + GENERATORS.put("hilo", TableHiLoGenerator.class); + GENERATORS.put("assigned", Assigned.class); + GENERATORS.put("identity", IdentityGenerator.class); + GENERATORS.put("select", SelectGenerator.class); + GENERATORS.put("sequence", SequenceGenerator.class); + GENERATORS.put("seqhilo", SequenceHiLoGenerator.class); + GENERATORS.put("increment", IncrementGenerator.class); + GENERATORS.put("foreign", ForeignGenerator.class); + GENERATORS.put("guid", GUIDGenerator.class); + GENERATORS.put("uuid.hex", UUIDHexGenerator.class); //uuid.hex is deprecated + GENERATORS.put("sequence-identity", SequenceIdentityGenerator.class); + } + + public static IdentifierGenerator create(String strategy, Type type, Properties params, Dialect dialect) + throws MappingException { + try { + Class clazz = getIdentifierGeneratorClass( strategy, dialect ); + IdentifierGenerator idgen = (IdentifierGenerator) clazz.newInstance(); + if (idgen instanceof Configurable) ( (Configurable) idgen).configure(type, params, dialect); + return idgen; + } + catch (Exception e) { + throw new MappingException("could not instantiate id generator", e); + } + } + + public static Class getIdentifierGeneratorClass(String strategy, Dialect dialect) { + Class clazz = (Class) GENERATORS.get(strategy); + if ( "native".equals(strategy) ) clazz = dialect.getNativeIdentifierGeneratorClass(); + try { + if (clazz==null) clazz = ReflectHelper.classForName(strategy); + } + catch (ClassNotFoundException e) { + throw new MappingException("could not interpret id generator strategy: " + strategy); + } + return clazz; + } + + public static Number createNumber(long value, Class clazz) throws IdentifierGenerationException { + if ( clazz==Long.class ) { + return new Long(value); + } + else if ( clazz==Integer.class ) { + return new Integer( (int) value ); + } + else if ( clazz==Short.class ) { + return new Short( (short) value ); + } + else { + throw new IdentifierGenerationException("this id generator generates long, integer, short"); + } + } + + private IdentifierGeneratorFactory() {} //cannot be instantiated + +} diff --git a/src/org/hibernate/id/IdentityGenerator.java b/src/org/hibernate/id/IdentityGenerator.java new file mode 100644 index 0000000000..fd9fda2054 --- /dev/null +++ b/src/org/hibernate/id/IdentityGenerator.java @@ -0,0 +1,161 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.PreparedStatement; + +import org.hibernate.engine.SessionImplementor; +import 
org.hibernate.id.insert.InsertGeneratedIdentifierDelegate; +import org.hibernate.id.insert.IdentifierGeneratingInsert; +import org.hibernate.id.insert.AbstractSelectingDelegate; +import org.hibernate.id.insert.AbstractReturningDelegate; +import org.hibernate.id.insert.InsertSelectIdentityInsert; +import org.hibernate.dialect.Dialect; +import org.hibernate.HibernateException; +import org.hibernate.AssertionFailure; +import org.hibernate.util.GetGeneratedKeysHelper; + + +/** + * A generator for use with ANSI-SQL IDENTITY columns used as the primary key. + * The IdentityGenerator for autoincrement/identity key generation. + *

    + * Indicates to the Session that identity (ie. identity/autoincrement + * column) key generation should be used. + * + * @author Christoph Sturm + */ +public class IdentityGenerator extends AbstractPostInsertGenerator { + + public InsertGeneratedIdentifierDelegate getInsertGeneratedIdentifierDelegate( + PostInsertIdentityPersister persister, + Dialect dialect, + boolean isGetGeneratedKeysEnabled) throws HibernateException { + if ( isGetGeneratedKeysEnabled ) { + return new GetGeneratedKeysDelegate( persister, dialect ); + } + else if ( dialect.supportsInsertSelectIdentity() ) { + return new InsertSelectDelegate( persister, dialect ); + } + else { + return new BasicDelegate( persister, dialect ); + } + } + + /** + * Delegate for dealing with IDENTITY columns using JDBC3 getGeneratedKeys + */ + public static class GetGeneratedKeysDelegate + extends AbstractReturningDelegate + implements InsertGeneratedIdentifierDelegate { + private final PostInsertIdentityPersister persister; + private final Dialect dialect; + + public GetGeneratedKeysDelegate(PostInsertIdentityPersister persister, Dialect dialect) { + super( persister ); + this.persister = persister; + this.dialect = dialect; + } + + public IdentifierGeneratingInsert prepareIdentifierGeneratingInsert() { + IdentifierGeneratingInsert insert = new IdentifierGeneratingInsert( dialect ); + insert.addIdentityColumn( persister.getRootTableKeyColumnNames()[0] ); + return insert; + } + + protected PreparedStatement prepare(String insertSQL, SessionImplementor session) throws SQLException { + return session.getBatcher().prepareStatement( insertSQL, true ); + } + + public Serializable executeAndExtract(PreparedStatement insert) throws SQLException { + insert.executeUpdate(); + return IdentifierGeneratorFactory.getGeneratedIdentity( + GetGeneratedKeysHelper.getGeneratedKey( insert ), + persister.getIdentifierType() + ); + } + } + + /** + * Delegate for dealing with IDENTITY columns where the dialect supports returning + * the generated IDENTITY value directly from the insert statement. 
+ */ + public static class InsertSelectDelegate + extends AbstractReturningDelegate + implements InsertGeneratedIdentifierDelegate { + private final PostInsertIdentityPersister persister; + private final Dialect dialect; + + public InsertSelectDelegate(PostInsertIdentityPersister persister, Dialect dialect) { + super( persister ); + this.persister = persister; + this.dialect = dialect; + } + + public IdentifierGeneratingInsert prepareIdentifierGeneratingInsert() { + InsertSelectIdentityInsert insert = new InsertSelectIdentityInsert( dialect ); + insert.addIdentityColumn( persister.getRootTableKeyColumnNames()[0] ); + return insert; + } + + protected PreparedStatement prepare(String insertSQL, SessionImplementor session) throws SQLException { + return session.getBatcher().prepareStatement( insertSQL, false ); + } + + public Serializable executeAndExtract(PreparedStatement insert) throws SQLException { + if ( !insert.execute() ) { + while ( !insert.getMoreResults() && insert.getUpdateCount() != -1 ) { + // do nothing until we hit the rsult set containing the generated id + } + } + ResultSet rs = insert.getResultSet(); + try { + return IdentifierGeneratorFactory.getGeneratedIdentity( rs, persister.getIdentifierType() ); + } + finally { + rs.close(); + } + } + + public Serializable determineGeneratedIdentifier(SessionImplementor session, Object entity) { + throw new AssertionFailure( "insert statement returns generated value" ); + } + } + + /** + * Delegate for dealing with IDENTITY columns where the dialect requires an + * additional command execution to retrieve the generated IDENTITY value + */ + public static class BasicDelegate + extends AbstractSelectingDelegate + implements InsertGeneratedIdentifierDelegate { + private final PostInsertIdentityPersister persister; + private final Dialect dialect; + + public BasicDelegate(PostInsertIdentityPersister persister, Dialect dialect) { + super( persister ); + this.persister = persister; + this.dialect = dialect; + } + + public IdentifierGeneratingInsert prepareIdentifierGeneratingInsert() { + IdentifierGeneratingInsert insert = new IdentifierGeneratingInsert( dialect ); + insert.addIdentityColumn( persister.getRootTableKeyColumnNames()[0] ); + return insert; + } + + protected String getSelectSQL() { + return persister.getIdentitySelectString(); + } + + protected Serializable getResult( + SessionImplementor session, + ResultSet rs, + Object object) throws SQLException { + return IdentifierGeneratorFactory.getGeneratedIdentity( rs, persister.getIdentifierType() ); + } + } + +} diff --git a/src/org/hibernate/id/IncrementGenerator.java b/src/org/hibernate/id/IncrementGenerator.java new file mode 100644 index 0000000000..768550c69d --- /dev/null +++ b/src/org/hibernate/id/IncrementGenerator.java @@ -0,0 +1,116 @@ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.mapping.Table; +import org.hibernate.type.Type; +import org.hibernate.util.StringHelper; + +/** + * increment
    + *
    + * An IdentifierGenerator that returns a long, constructed by + * counting from the maximum primary key value at startup. Not safe for use in a + * cluster!
    + *
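+ * A typical mapping (illustrative):
+ *
+ *   <id name="id" type="long">
+ *     <generator class="increment"/>
+ *   </id>
+ *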
    + * Mapping parameters supported, but not usually needed: tables, column. + * (The tables parameter specified a comma-separated list of table names.) + * + * @author Gavin King + */ +public class IncrementGenerator implements IdentifierGenerator, Configurable { + + private static final Log log = LogFactory.getLog(IncrementGenerator.class); + + private long next; + private String sql; + private Class returnClass; + + public synchronized Serializable generate(SessionImplementor session, Object object) + throws HibernateException { + + if (sql!=null) { + getNext( session ); + } + return IdentifierGeneratorFactory.createNumber(next++, returnClass); + } + + public void configure(Type type, Properties params, Dialect dialect) + throws MappingException { + + String tableList = params.getProperty("tables"); + if (tableList==null) tableList = params.getProperty(PersistentIdentifierGenerator.TABLES); + String[] tables = StringHelper.split(", ", tableList); + String column = params.getProperty("column"); + if (column==null) column = params.getProperty(PersistentIdentifierGenerator.PK); + String schema = params.getProperty(PersistentIdentifierGenerator.SCHEMA); + String catalog = params.getProperty(PersistentIdentifierGenerator.CATALOG); + returnClass = type.getReturnedClass(); + + + StringBuffer buf = new StringBuffer(); + for ( int i=0; i1) { + buf.append("select ").append(column).append(" from "); + } + buf.append( Table.qualify( catalog, schema, tables[i] ) ); + if ( i1) { + buf.insert(0, "( ").append(" ) ids_"); + column = "ids_." + column; + } + + sql = "select max(" + column + ") from " + buf.toString(); + } + + private void getNext( SessionImplementor session ) { + + log.debug("fetching initial value: " + sql); + + try { + PreparedStatement st = session.getBatcher().prepareSelectStatement(sql); + try { + ResultSet rs = st.executeQuery(); + try { + if ( rs.next() ) { + next = rs.getLong(1) + 1; + if ( rs.wasNull() ) next = 1; + } + else { + next = 1; + } + sql=null; + log.debug("first free id: " + next); + } + finally { + rs.close(); + } + } + finally { + session.getBatcher().closeStatement(st); + } + + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not fetch initial value for increment generator", + sql + ); + } + } + +} diff --git a/src/org/hibernate/id/MultipleHiLoPerTableGenerator.java b/src/org/hibernate/id/MultipleHiLoPerTableGenerator.java new file mode 100644 index 0000000000..59b4f4fb24 --- /dev/null +++ b/src/org/hibernate/id/MultipleHiLoPerTableGenerator.java @@ -0,0 +1,241 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.TransactionHelper; +import org.hibernate.mapping.Table; +import org.hibernate.type.Type; +import org.hibernate.util.PropertiesHelper; + +/** + * + * A hilo IdentifierGenerator that returns a Long, constructed using + * a hi/lo algorithm. 
The hi value MUST be fetched in a separate transaction + * from the Session transaction so the generator must be able to obtain + * a new connection and commit it. Hence this implementation may not + * be used when the user is supplying connections. In this + * case a SequenceHiLoGenerator would be a better choice (where + * supported).
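+ * To illustrate the hi/lo arithmetic (editorial note, based on the generate()
+ * implementation below): with the default max_lo of Short.MAX_VALUE (32767),
+ * a hi value of 3 read from the table yields identifiers 3*32768 + 0 through
+ * 3*32768 + 32767, so the table is queried only once per 32768 identifiers.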
    + *
+ * A hilo IdentifierGenerator that uses a database
+ * table to store the last generated values. A table can contain
+ * several hi values; they are distinct from each other through a key.
+ *
+ * This implementation is not compliant with a user-supplied connection.
+ *
+ * Allowed parameters (all of them are optional):
+ *   • table: table name (default hibernate_sequences)
+ *   • primary_key_column: key column name (default sequence_name)
+ *   • value_column: hi value column name (default sequence_next_hi_value)
+ *   • primary_key_value: key value for the current entity
+ *     (defaults to the entity's primary table name)
+ *   • primary_key_length: length of the key column in DB,
+ *     represented as a varchar (default 255)
+ *   • max_lo: max low value before increasing hi (default Short.MAX_VALUE)
    + * + * @author Emmanuel Bernard + * @author Klaus Richarz. + */ +public class MultipleHiLoPerTableGenerator + extends TransactionHelper + implements PersistentIdentifierGenerator, Configurable { + + private static final Log log = LogFactory.getLog(MultipleHiLoPerTableGenerator.class); + + public static final String ID_TABLE = "table"; + public static final String PK_COLUMN_NAME = "primary_key_column"; + public static final String PK_VALUE_NAME = "primary_key_value"; + public static final String VALUE_COLUMN_NAME = "value_column"; + public static final String PK_LENGTH_NAME = "primary_key_length"; + + private static final int DEFAULT_PK_LENGTH = 255; + public static final String DEFAULT_TABLE = "hibernate_sequences"; + private static final String DEFAULT_PK_COLUMN = "sequence_name"; + private static final String DEFAULT_VALUE_COLUMN = "sequence_next_hi_value"; + + private String tableName; + private String pkColumnName; + private String valueColumnName; + private String query; + private String insert; + private String update; + + //hilo params + public static final String MAX_LO = "max_lo"; + + private long hi; + private int lo; + private int maxLo; + private Class returnClass; + private int keySize; + + + public String[] sqlCreateStrings(Dialect dialect) throws HibernateException { + return new String[] { + new StringBuffer( dialect.getCreateTableString() ) + .append( ' ' ) + .append( tableName ) + .append( " ( " ) + .append( pkColumnName ) + .append( ' ' ) + .append( dialect.getTypeName( Types.VARCHAR, keySize, 0, 0 ) ) + .append( ", " ) + .append( valueColumnName ) + .append( ' ' ) + .append( dialect.getTypeName( Types.INTEGER ) ) + .append( " ) " ) + .toString() + }; + } + + public String[] sqlDropStrings(Dialect dialect) throws HibernateException { + StringBuffer sqlDropString = new StringBuffer( "drop table " ); + if ( dialect.supportsIfExistsBeforeTableName() ) { + sqlDropString.append( "if exists " ); + } + sqlDropString.append( tableName ).append( dialect.getCascadeConstraintsString() ); + if ( dialect.supportsIfExistsAfterTableName() ) { + sqlDropString.append( " if exists" ); + } + return new String[] { sqlDropString.toString() }; + } + + public Object generatorKey() { + return tableName; + } + + public Serializable doWorkInCurrentTransaction(Connection conn, String sql) throws SQLException { + int result; + int rows; + do { + // The loop ensures atomicity of the + // select + update even for no transaction + // or read committed isolation level + + //sql = query; + SQL.debug(query); + PreparedStatement qps = conn.prepareStatement(query); + PreparedStatement ips = null; + try { + //qps.setString(1, key); + ResultSet rs = qps.executeQuery(); + boolean isInitialized = rs.next(); + if ( !isInitialized ) { + result = 0; + ips = conn.prepareStatement(insert); + //ips.setString(1, key); + ips.setInt(1, result); + ips.execute(); + } + else { + result = rs.getInt(1); + } + rs.close(); + } + catch (SQLException sqle) { + log.error("could not read or init a hi value", sqle); + throw sqle; + } + finally { + if (ips != null) { + ips.close(); + } + qps.close(); + } + + //sql = update; + PreparedStatement ups = conn.prepareStatement(update); + try { + ups.setInt( 1, result + 1 ); + ups.setInt( 2, result ); + //ups.setString( 3, key ); + rows = ups.executeUpdate(); + } + catch (SQLException sqle) { + log.error("could not update hi value in: " + tableName, sqle); + throw sqle; + } + finally { + ups.close(); + } + } + while (rows==0); + return new Integer(result); + } + + public synchronized 
Serializable generate(SessionImplementor session, Object obj) + throws HibernateException { + if (maxLo < 1) { + //keep the behavior consistent even for boundary usages + int val = ( (Integer) doWorkInNewTransaction(session) ).intValue(); + if (val == 0) val = ( (Integer) doWorkInNewTransaction(session) ).intValue(); + return IdentifierGeneratorFactory.createNumber( val, returnClass ); + } + if (lo>maxLo) { + int hival = ( (Integer) doWorkInNewTransaction(session) ).intValue(); + lo = (hival == 0) ? 1 : 0; + hi = hival * (maxLo+1); + log.debug("new hi value: " + hival); + } + return IdentifierGeneratorFactory.createNumber( hi + lo++, returnClass ); + } + + public void configure(Type type, Properties params, Dialect dialect) throws MappingException { + tableName = PropertiesHelper.getString(ID_TABLE, params, DEFAULT_TABLE); + pkColumnName = PropertiesHelper.getString(PK_COLUMN_NAME, params, DEFAULT_PK_COLUMN); + valueColumnName = PropertiesHelper.getString(VALUE_COLUMN_NAME, params, DEFAULT_VALUE_COLUMN); + String schemaName = params.getProperty(SCHEMA); + String catalogName = params.getProperty(CATALOG); + keySize = PropertiesHelper.getInt(PK_LENGTH_NAME, params, DEFAULT_PK_LENGTH); + String keyValue = PropertiesHelper.getString(PK_VALUE_NAME, params, params.getProperty(TABLE) ); + + if ( tableName.indexOf( '.' )<0 ) { + tableName = Table.qualify( catalogName, schemaName, tableName ); + } + + query = "select " + + valueColumnName + + " from " + + dialect.appendLockHint(LockMode.UPGRADE, tableName) + + " where " + pkColumnName + " = '" + keyValue + "'" + + dialect.getForUpdateString(); + + update = "update " + + tableName + + " set " + + valueColumnName + + " = ? where " + + valueColumnName + + " = ? and " + + pkColumnName + + " = '" + + keyValue + + "'"; + + insert = "insert into " + tableName + + "(" + pkColumnName + ", " + valueColumnName + ") " + + "values('"+ keyValue +"', ?)"; + + + //hilo config + maxLo = PropertiesHelper.getInt(MAX_LO, params, Short.MAX_VALUE); + lo = maxLo + 1; // so we "clock over" on the first invocation + returnClass = type.getReturnedClass(); + } +} diff --git a/src/org/hibernate/id/PersistentIdentifierGenerator.java b/src/org/hibernate/id/PersistentIdentifierGenerator.java new file mode 100644 index 0000000000..8cb97d5107 --- /dev/null +++ b/src/org/hibernate/id/PersistentIdentifierGenerator.java @@ -0,0 +1,80 @@ +//$Id$ +package org.hibernate.id; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.dialect.Dialect; + +/** + * An IdentifierGenerator that requires creation of database objects. + *

    + * All PersistentIdentifierGenerators that also implement + * Configurable have access to a special mapping parameter: schema + * + * @see IdentifierGenerator + * @see Configurable + * @author Gavin King + */ +public interface PersistentIdentifierGenerator extends IdentifierGenerator { + + /** + * The configuration parameter holding the schema name + */ + public static final String SCHEMA = "schema"; + + /** + * The configuration parameter holding the table name for the + * generated id + */ + public static final String TABLE = "target_table"; + + /** + * The configuration parameter holding the table names for all + * tables for which the id must be unique + */ + public static final String TABLES = "identity_tables"; + + /** + * The configuration parameter holding the primary key column + * name of the generated id + */ + public static final String PK = "target_column"; + + /** + * The configuration parameter holding the catalog name + */ + public static final String CATALOG = "catalog"; + + /** + * The SQL required to create the underlying database objects. + * @param dialect + * @return String[] + * @throws HibernateException + */ + public String[] sqlCreateStrings(Dialect dialect) throws HibernateException; + + /** + * The SQL required to remove the underlying database objects. + * @param dialect + * @return String + * @throws HibernateException + */ + public String[] sqlDropStrings(Dialect dialect) throws HibernateException; + + /** + * Return a key unique to the underlying database objects. Prevents us from + * trying to create/remove them multiple times. + * @return Object an identifying key for this generator + */ + public Object generatorKey(); + + static final Log SQL = LogFactory.getLog("org.hibernate.SQL"); + +} + + + + + + diff --git a/src/org/hibernate/id/PostInsertIdentifierGenerator.java b/src/org/hibernate/id/PostInsertIdentifierGenerator.java new file mode 100755 index 0000000000..e20aed3bd5 --- /dev/null +++ b/src/org/hibernate/id/PostInsertIdentifierGenerator.java @@ -0,0 +1,16 @@ +//$Id$ +package org.hibernate.id; + +import org.hibernate.HibernateException; +import org.hibernate.dialect.Dialect; +import org.hibernate.id.insert.InsertGeneratedIdentifierDelegate; + +/** + * @author Gavin King + */ +public interface PostInsertIdentifierGenerator extends IdentifierGenerator { + public InsertGeneratedIdentifierDelegate getInsertGeneratedIdentifierDelegate( + PostInsertIdentityPersister persister, + Dialect dialect, + boolean isGetGeneratedKeysEnabled) throws HibernateException; +} diff --git a/src/org/hibernate/id/PostInsertIdentityPersister.java b/src/org/hibernate/id/PostInsertIdentityPersister.java new file mode 100755 index 0000000000..4329948e63 --- /dev/null +++ b/src/org/hibernate/id/PostInsertIdentityPersister.java @@ -0,0 +1,37 @@ +//$Id$ +package org.hibernate.id; + +import org.hibernate.persister.entity.EntityPersister; + +/** + * A persister that may have an identity assigned by execution of + * a SQL INSERT. + * + * @author Gavin King + */ +public interface PostInsertIdentityPersister extends EntityPersister { + /** + * Get a SQL select string that performs a select based on a unique + * key determined by the given property name). + * + * @param propertyName The name of the property which maps to the + * column(s) to use in the select statement restriction. 
+ * @return The SQL select string + */ + public String getSelectByUniqueKeyString(String propertyName); + + /** + * Get the database-specific SQL command to retrieve the last + * generated IDENTITY value. + * + * @return The SQL command string + */ + public String getIdentitySelectString(); + + /** + * The names of the primary key columns in the root table. + * + * @return The primary key column names. + */ + public String[] getRootTableKeyColumnNames(); +} diff --git a/src/org/hibernate/id/SelectGenerator.java b/src/org/hibernate/id/SelectGenerator.java new file mode 100755 index 0000000000..9402077036 --- /dev/null +++ b/src/org/hibernate/id/SelectGenerator.java @@ -0,0 +1,138 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Properties; + +import org.hibernate.MappingException; +import org.hibernate.HibernateException; +import org.hibernate.id.insert.InsertGeneratedIdentifierDelegate; +import org.hibernate.id.insert.IdentifierGeneratingInsert; +import org.hibernate.id.insert.AbstractSelectingDelegate; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.ValueInclusion; +import org.hibernate.type.Type; + +/** + * A generator that selects the just inserted row to determine the identifier + * value assigned by the database. The correct row is located using a unique + * key. + *

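+ * As the next paragraph notes, a key parameter names the unique-key
+ * property used to locate the inserted row. A hedged configuration sketch;
+ * the property value "ssn" and the dialect instance are illustrative:
+ * <pre>
+ * Properties params = new Properties();
+ * params.setProperty( "key", "ssn" );
+ * SelectGenerator generator = new SelectGenerator();
+ * generator.configure( Hibernate.LONG, params, dialect );
+ * </pre>
+ *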
    + * One mapping parameter is required: key (unless a natural-id is defined in the mapping). + * + * @author Gavin King + */ +public class SelectGenerator extends AbstractPostInsertGenerator implements Configurable { + + private String uniqueKeyPropertyName; + + public void configure(Type type, Properties params, Dialect d) throws MappingException { + uniqueKeyPropertyName = params.getProperty( "key" ); + } + + public InsertGeneratedIdentifierDelegate getInsertGeneratedIdentifierDelegate( + PostInsertIdentityPersister persister, + Dialect dialect, + boolean isGetGeneratedKeysEnabled) throws HibernateException { + return new SelectGeneratorDelegate( persister, dialect, uniqueKeyPropertyName ); + } + + private static String determineNameOfPropertyToUse(PostInsertIdentityPersister persister, String supplied) { + if ( supplied != null ) { + return supplied; + } + int[] naturalIdPropertyIndices = persister.getNaturalIdentifierProperties(); + if ( naturalIdPropertyIndices == null ){ + throw new IdentifierGenerationException( + "no natural-id property defined; need to specify [key] in " + + "generator parameters" + ); + } + if ( naturalIdPropertyIndices.length > 1 ) { + throw new IdentifierGenerationException( + "select generator does not currently support composite " + + "natural-id properties; need to specify [key] in generator parameters" + ); + } + ValueInclusion inclusion = persister.getPropertyInsertGenerationInclusions() [ naturalIdPropertyIndices[0] ]; + if ( inclusion != ValueInclusion.NONE ) { + throw new IdentifierGenerationException( + "natural-id also defined as insert-generated; need to specify [key] " + + "in generator parameters" + ); + } + return persister.getPropertyNames() [ naturalIdPropertyIndices[0] ]; + } + + + /** + * The delegate for the select generation strategy. 
+ */ + public static class SelectGeneratorDelegate + extends AbstractSelectingDelegate + implements InsertGeneratedIdentifierDelegate { + private final PostInsertIdentityPersister persister; + private final Dialect dialect; + + private final String uniqueKeyPropertyName; + private final Type uniqueKeyType; + private final Type idType; + + private final String idSelectString; + + private SelectGeneratorDelegate( + PostInsertIdentityPersister persister, + Dialect dialect, + String suppliedUniqueKeyPropertyName) { + super( persister ); + this.persister = persister; + this.dialect = dialect; + this.uniqueKeyPropertyName = determineNameOfPropertyToUse( persister, suppliedUniqueKeyPropertyName ); + + idSelectString = persister.getSelectByUniqueKeyString( uniqueKeyPropertyName ); + uniqueKeyType = persister.getPropertyType( uniqueKeyPropertyName ); + idType = persister.getIdentifierType(); + } + + public IdentifierGeneratingInsert prepareIdentifierGeneratingInsert() { + return new IdentifierGeneratingInsert( dialect ); + } + + + // AbstractSelectingDelegate impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected String getSelectSQL() { + return idSelectString; + } + + protected void bindParameters( + SessionImplementor session, + PreparedStatement ps, + Object entity) throws SQLException { + Object uniqueKeyValue = persister.getPropertyValue( entity, uniqueKeyPropertyName, session.getEntityMode() ); + uniqueKeyType.nullSafeSet( ps, uniqueKeyValue, 1, session ); + } + + protected Serializable getResult( + SessionImplementor session, + ResultSet rs, + Object entity) throws SQLException { + if ( !rs.next() ) { + throw new IdentifierGenerationException( + "the inserted row could not be located by the unique key: " + + uniqueKeyPropertyName + ); + } + return ( Serializable ) idType.nullSafeGet( + rs, + persister.getRootTableKeyColumnNames(), + session, + entity + ); + } + } +} diff --git a/src/org/hibernate/id/SequenceGenerator.java b/src/org/hibernate/id/SequenceGenerator.java new file mode 100644 index 0000000000..e24e860c9f --- /dev/null +++ b/src/org/hibernate/id/SequenceGenerator.java @@ -0,0 +1,126 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.mapping.Table; +import org.hibernate.type.Type; +import org.hibernate.util.PropertiesHelper; + +/** + * sequence
    + *
    + * Generates long values using an oracle-style sequence. A higher + * performance algorithm is SequenceHiLoGenerator.
    + *
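+ * A hedged configuration sketch using the parameters listed below; the
+ * sequence name, dialect, session and entity are illustrative:
+ * <pre>
+ * Properties params = new Properties();
+ * params.setProperty( SequenceGenerator.SEQUENCE, "my_entity_seq" );
+ * SequenceGenerator generator = new SequenceGenerator();
+ * generator.configure( Hibernate.LONG, params, dialect );
+ * Serializable id = generator.generate( session, entity );
+ * </pre>
+ *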
    + * Mapping parameters supported: sequence, parameters. + * + * @see SequenceHiLoGenerator + * @see TableHiLoGenerator + * @author Gavin King + */ + +public class SequenceGenerator implements PersistentIdentifierGenerator, Configurable { + + /** + * The sequence parameter + */ + public static final String SEQUENCE = "sequence"; + + /** + * The parameters parameter, appended to the create sequence DDL. + * For example (Oracle): INCREMENT BY 1 START WITH 1 MAXVALUE 100 NOCACHE. + */ + public static final String PARAMETERS = "parameters"; + + private String sequenceName; + private String parameters; + private Type identifierType; + private String sql; + + private static final Log log = LogFactory.getLog(SequenceGenerator.class); + + public void configure(Type type, Properties params, Dialect dialect) throws MappingException { + sequenceName = PropertiesHelper.getString(SEQUENCE, params, "hibernate_sequence"); + parameters = params.getProperty(PARAMETERS); + String schemaName = params.getProperty(SCHEMA); + String catalogName = params.getProperty(CATALOG); + + if (sequenceName.indexOf( '.' ) < 0) { + sequenceName = Table.qualify( catalogName, schemaName, sequenceName ); + } + + this.identifierType = type; + sql = dialect.getSequenceNextValString(sequenceName); + } + + public Serializable generate(SessionImplementor session, Object obj) + throws HibernateException { + + try { + + PreparedStatement st = session.getBatcher().prepareSelectStatement(sql); + try { + ResultSet rs = st.executeQuery(); + try { + rs.next(); + Serializable result = IdentifierGeneratorFactory.get( + rs, identifierType + ); + if ( log.isDebugEnabled() ) { + log.debug("Sequence identifier generated: " + result); + } + return result; + } + finally { + rs.close(); + } + } + finally { + session.getBatcher().closeStatement(st); + } + + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not get next sequence value", + sql + ); + } + + } + + public String[] sqlCreateStrings(Dialect dialect) throws HibernateException { + String[] ddl = dialect.getCreateSequenceStrings(sequenceName); + if ( parameters != null ) { + ddl[ddl.length - 1] += ' ' + parameters; + } + return ddl; + } + + public String[] sqlDropStrings(Dialect dialect) throws HibernateException { + return dialect.getDropSequenceStrings(sequenceName); + } + + public Object generatorKey() { + return sequenceName; + } + + public String getSequenceName() { + return sequenceName; + } + +} diff --git a/src/org/hibernate/id/SequenceHiLoGenerator.java b/src/org/hibernate/id/SequenceHiLoGenerator.java new file mode 100644 index 0000000000..923880f8f0 --- /dev/null +++ b/src/org/hibernate/id/SequenceHiLoGenerator.java @@ -0,0 +1,68 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.type.Type; +import org.hibernate.util.PropertiesHelper; + +/** + * seqhilo
    + *
    + * An IdentifierGenerator that combines a hi/lo algorithm with an underlying + * oracle-style sequence that generates hi values. The user may specify a + * maximum lo value to determine how often new hi values are fetched.
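+ * For example, a sketch of the arithmetic in generate() with max_lo=9:
+ * a fetched hi value of 1 yields hi = 1 * (9 + 1) = 10, so identifiers
+ * 10..19 are handed out in memory before the sequence is consulted again:
+ * <pre>
+ * hi = hival * (maxLo + 1);  // hival = 1  gives  hi = 10
+ * id = hi + lo++;            // 10, 11, ... 19, then a new hi is fetched
+ * </pre>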
    + *
    + * If sequences are not available, TableHiLoGenerator might be an + * alternative.
    + *
    + * Mapping parameters supported: sequence, max_lo, parameters. + * + * @see TableHiLoGenerator + * @author Gavin King + */ +public class SequenceHiLoGenerator extends SequenceGenerator { + + public static final String MAX_LO = "max_lo"; + + private static final Log log = LogFactory.getLog(SequenceHiLoGenerator.class); + + private int maxLo; + private int lo; + private long hi; + private Class returnClass; + + public void configure(Type type, Properties params, Dialect d) throws MappingException { + super.configure(type, params, d); + maxLo = PropertiesHelper.getInt(MAX_LO, params, 9); + lo = maxLo + 1; // so we "clock over" on the first invocation + returnClass = type.getReturnedClass(); + } + + public synchronized Serializable generate(SessionImplementor session, Object obj) + throws HibernateException { + if (maxLo < 1) { + //keep the behavior consistent even for boundary usages + long val = ( (Number) super.generate(session, obj) ).longValue(); + if (val == 0) val = ( (Number) super.generate(session, obj) ).longValue(); + return IdentifierGeneratorFactory.createNumber( val, returnClass ); + } + if ( lo>maxLo ) { + long hival = ( (Number) super.generate(session, obj) ).longValue(); + lo = (hival == 0) ? 1 : 0; + hi = hival * ( maxLo+1 ); + if ( log.isDebugEnabled() ) + log.debug("new hi value: " + hival); + } + + return IdentifierGeneratorFactory.createNumber( hi + lo++, returnClass ); + } + +} diff --git a/src/org/hibernate/id/SequenceIdentityGenerator.java b/src/org/hibernate/id/SequenceIdentityGenerator.java new file mode 100644 index 0000000000..924fb5ac99 --- /dev/null +++ b/src/org/hibernate/id/SequenceIdentityGenerator.java @@ -0,0 +1,100 @@ +package org.hibernate.id; + +import org.hibernate.id.insert.InsertGeneratedIdentifierDelegate; +import org.hibernate.id.insert.AbstractReturningDelegate; +import org.hibernate.id.insert.IdentifierGeneratingInsert; +import org.hibernate.dialect.Dialect; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.sql.Insert; +import org.hibernate.util.NamedGeneratedKeysHelper; +import org.hibernate.type.Type; +import org.hibernate.engine.SessionImplementor; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Properties; + +/** + * A generator which combines sequence generation with immediate retrieval + * through JDBC3 {@link java.sql.Connection#prepareStatement(String, String[]) getGeneratedKeys}. + * In this respect it works much like ANSI-SQL IDENTITY generation. + *

+ * This generator is only known to work with newer Oracle drivers compiled for
+ * JDK 1.4 (JDBC3).
+ *
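+ * A minimal sketch of the JDBC3 pattern this generator relies on; the
+ * statement text and key column name here are illustrative, and the real
+ * calls are issued by the Delegate defined below:
+ * <pre>
+ * PreparedStatement ps = connection.prepareStatement( insertSql, new String[] { "ID" } );
+ * ps.executeUpdate();
+ * ResultSet keys = ps.getGeneratedKeys();
+ * keys.next();
+ * long id = keys.getLong( 1 );
+ * </pre>
+ *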

    + * Note: Due to a bug in Oracle drivers, sql comments on these insert statements + * are completely disabled. + * + * @author Steve Ebersole + */ +public class SequenceIdentityGenerator extends SequenceGenerator + implements PostInsertIdentifierGenerator { + + private static final Log log = LogFactory.getLog( SequenceIdentityGenerator.class ); + + public Serializable generate(SessionImplementor s, Object obj) { + return IdentifierGeneratorFactory.POST_INSERT_INDICATOR; + } + + public InsertGeneratedIdentifierDelegate getInsertGeneratedIdentifierDelegate( + PostInsertIdentityPersister persister, + Dialect dialect, + boolean isGetGeneratedKeysEnabled) throws HibernateException { + return new Delegate( persister, dialect, getSequenceName() ); + } + + public void configure(Type type, Properties params, Dialect dialect) throws MappingException { + super.configure( type, params, dialect ); + } + + public static class Delegate extends AbstractReturningDelegate { + private final Dialect dialect; + private final String sequenceNextValFragment; + private final String[] keyColumns; + + public Delegate(PostInsertIdentityPersister persister, Dialect dialect, String sequenceName) { + super( persister ); + this.dialect = dialect; + this.sequenceNextValFragment = dialect.getSelectSequenceNextValString( sequenceName ); + this.keyColumns = getPersister().getRootTableKeyColumnNames(); + if ( keyColumns.length > 1 ) { + throw new HibernateException( "sequence-identity generator cannot be used with with multi-column keys" ); + } + } + + public IdentifierGeneratingInsert prepareIdentifierGeneratingInsert() { + NoCommentsInsert insert = new NoCommentsInsert( dialect ); + insert.addColumn( getPersister().getRootTableKeyColumnNames()[0], sequenceNextValFragment ); + return insert; + } + + protected PreparedStatement prepare(String insertSQL, SessionImplementor session) throws SQLException { + return session.getBatcher().prepareStatement( insertSQL, keyColumns ); + } + + protected Serializable executeAndExtract(PreparedStatement insert) throws SQLException { + insert.executeUpdate(); + return IdentifierGeneratorFactory.getGeneratedIdentity( + NamedGeneratedKeysHelper.getGeneratedKey( insert ), + getPersister().getIdentifierType() + ); + } + } + + public static class NoCommentsInsert extends IdentifierGeneratingInsert { + public NoCommentsInsert(Dialect dialect) { + super( dialect ); + } + + public Insert setComment(String comment) { + // don't allow comments on these insert statements as comments totally + // blow up the Oracle getGeneratedKeys "support" :( + log.info( "disallowing insert statement comment for select-identity due to Oracle driver bug" ); + return this; + } + } +} diff --git a/src/org/hibernate/id/TableGenerator.java b/src/org/hibernate/id/TableGenerator.java new file mode 100644 index 0000000000..3af8968026 --- /dev/null +++ b/src/org/hibernate/id/TableGenerator.java @@ -0,0 +1,170 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.TransactionHelper; +import org.hibernate.mapping.Table; +import org.hibernate.type.Type; 
+import org.hibernate.util.PropertiesHelper; + +/** + * An IdentifierGenerator that uses a database + * table to store the last generated value. It is not + * intended that applications use this strategy directly. + * However, it may be used to build other (efficient) + * strategies. The returned type is Integer.
    + *
+ * The hi value MUST be fetched in a separate transaction
+ * from the Session transaction, so the generator must
+ * be able to obtain a new connection and commit it. Hence
+ * this implementation may not be used when the user is
+ * supplying connections.
    + *
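+ * A hedged configuration sketch using the parameters listed below; the
+ * table and column names and the dialect instance are illustrative:
+ * <pre>
+ * Properties params = new Properties();
+ * params.setProperty( TableGenerator.TABLE, "my_hi_table" );
+ * params.setProperty( TableGenerator.COLUMN, "next_hi" );
+ * TableGenerator generator = new TableGenerator();
+ * generator.configure( Hibernate.INTEGER, params, dialect );
+ * </pre>
+ *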
    + * Mapping parameters supported: table, column + * + * @see TableHiLoGenerator + * @author Gavin King + */ +public class TableGenerator extends TransactionHelper + implements PersistentIdentifierGenerator, Configurable { + /* COLUMN and TABLE should be renamed but it would break the public API */ + /** The column parameter */ + public static final String COLUMN = "column"; + + /** Default column name */ + public static final String DEFAULT_COLUMN_NAME = "next_hi"; + + /** The table parameter */ + public static final String TABLE = "table"; + + /** Default table name */ + public static final String DEFAULT_TABLE_NAME = "hibernate_unique_key"; + + private static final Log log = LogFactory.getLog(TableGenerator.class); + + private String tableName; + private String columnName; + private String query; + private String update; + + public void configure(Type type, Properties params, Dialect dialect) { + + tableName = PropertiesHelper.getString(TABLE, params, DEFAULT_TABLE_NAME); + columnName = PropertiesHelper.getString(COLUMN, params, DEFAULT_COLUMN_NAME); + String schemaName = params.getProperty(SCHEMA); + String catalogName = params.getProperty(CATALOG); + + if ( tableName.indexOf( '.' )<0 ) { + tableName = Table.qualify( catalogName, schemaName, tableName ); + } + + query = "select " + + columnName + + " from " + + dialect.appendLockHint(LockMode.UPGRADE, tableName) + + dialect.getForUpdateString(); + + update = "update " + + tableName + + " set " + + columnName + + " = ? where " + + columnName + + " = ?"; + } + + public synchronized Serializable generate(SessionImplementor session, Object object) + throws HibernateException { + int result = ( (Integer) doWorkInNewTransaction(session) ).intValue(); + return new Integer(result); + } + + + public String[] sqlCreateStrings(Dialect dialect) { + return new String[] { + dialect.getCreateTableString() + " " + tableName + " ( " + columnName + " " + dialect.getTypeName(Types.INTEGER) + " )", + "insert into " + tableName + " values ( 0 )" + }; + } + + public String[] sqlDropStrings(Dialect dialect) { + StringBuffer sqlDropString = new StringBuffer( "drop table " ); + if ( dialect.supportsIfExistsBeforeTableName() ) { + sqlDropString.append( "if exists " ); + } + sqlDropString.append( tableName ).append( dialect.getCascadeConstraintsString() ); + if ( dialect.supportsIfExistsAfterTableName() ) { + sqlDropString.append( " if exists" ); + } + return new String[] { sqlDropString.toString() }; + } + + public Object generatorKey() { + return tableName; + } + + public Serializable doWorkInCurrentTransaction(Connection conn, String sql) throws SQLException { + int result; + int rows; + do { + // The loop ensures atomicity of the + // select + update even for no transaction + // or read committed isolation level + + sql = query; + SQL.debug(query); + PreparedStatement qps = conn.prepareStatement(query); + try { + ResultSet rs = qps.executeQuery(); + if ( !rs.next() ) { + String err = "could not read a hi value - you need to populate the table: " + tableName; + log.error(err); + throw new IdentifierGenerationException(err); + } + result = rs.getInt(1); + rs.close(); + } + catch (SQLException sqle) { + log.error("could not read a hi value", sqle); + throw sqle; + } + finally { + qps.close(); + } + + sql = update; + SQL.debug(update); + PreparedStatement ups = conn.prepareStatement(update); + try { + ups.setInt( 1, result + 1 ); + ups.setInt( 2, result ); + rows = ups.executeUpdate(); + } + catch (SQLException sqle) { + log.error("could not update hi value 
in: " + tableName, sqle); + throw sqle; + } + finally { + ups.close(); + } + } + while (rows==0); + return new Integer(result); + } +} diff --git a/src/org/hibernate/id/TableHiLoGenerator.java b/src/org/hibernate/id/TableHiLoGenerator.java new file mode 100644 index 0000000000..463a77bf3b --- /dev/null +++ b/src/org/hibernate/id/TableHiLoGenerator.java @@ -0,0 +1,72 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.type.Type; +import org.hibernate.util.PropertiesHelper; + +/** + * hilo
    + *
+ * An IdentifierGenerator that returns a Long, constructed using
+ * a hi/lo algorithm. The hi value MUST be fetched in a separate transaction
+ * from the Session transaction, so the generator must be able to obtain
+ * a new connection and commit it. Hence this implementation may not
+ * be used when the user is supplying connections. In this
+ * case a SequenceHiLoGenerator would be a better choice (where
+ * supported).
    + *
    + * Mapping parameters supported: table, column, max_lo + * + * @see SequenceHiLoGenerator + * @author Gavin King + */ + +public class TableHiLoGenerator extends TableGenerator { + + /** + * The max_lo parameter + */ + public static final String MAX_LO = "max_lo"; + + private long hi; + private int lo; + private int maxLo; + private Class returnClass; + + private static final Log log = LogFactory.getLog(TableHiLoGenerator.class); + + public void configure(Type type, Properties params, Dialect d) { + super.configure(type, params, d); + maxLo = PropertiesHelper.getInt(MAX_LO, params, Short.MAX_VALUE); + lo = maxLo + 1; // so we "clock over" on the first invocation + returnClass = type.getReturnedClass(); + } + + public synchronized Serializable generate(SessionImplementor session, Object obj) + throws HibernateException { + if (maxLo < 1) { + //keep the behavior consistent even for boundary usages + long val = ( (Number) super.generate(session, obj) ).longValue(); + if (val == 0) val = ( (Number) super.generate(session, obj) ).longValue(); + return IdentifierGeneratorFactory.createNumber( val, returnClass ); + } + if (lo>maxLo) { + long hival = ( (Number) super.generate(session, obj) ).longValue(); + lo = (hival == 0) ? 1 : 0; + hi = hival * (maxLo+1); + log.debug("new hi value: " + hival); + } + + return IdentifierGeneratorFactory.createNumber( hi + lo++, returnClass ); + + } + +} diff --git a/src/org/hibernate/id/UUIDHexGenerator.java b/src/org/hibernate/id/UUIDHexGenerator.java new file mode 100644 index 0000000000..4ba7028bd8 --- /dev/null +++ b/src/org/hibernate/id/UUIDHexGenerator.java @@ -0,0 +1,75 @@ +//$Id$ +package org.hibernate.id; + +import java.io.Serializable; +import java.util.Properties; + +import org.hibernate.Hibernate; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.type.Type; +import org.hibernate.util.PropertiesHelper; + +/** + * uuid
    + *
    + * A UUIDGenerator that returns a string of length 32, + * This string will consist of only hex digits. Optionally, + * the string may be generated with separators between each + * component of the UUID. + * + * Mapping parameters supported: separator. + * + * @author Gavin King + */ + +public class UUIDHexGenerator extends AbstractUUIDGenerator implements Configurable { + + private String sep = ""; + + protected String format(int intval) { + String formatted = Integer.toHexString(intval); + StringBuffer buf = new StringBuffer("00000000"); + buf.replace( 8-formatted.length(), 8, formatted ); + return buf.toString(); + } + + protected String format(short shortval) { + String formatted = Integer.toHexString(shortval); + StringBuffer buf = new StringBuffer("0000"); + buf.replace( 4-formatted.length(), 4, formatted ); + return buf.toString(); + } + + public Serializable generate(SessionImplementor session, Object obj) { + return new StringBuffer(36) + .append( format( getIP() ) ).append(sep) + .append( format( getJVM() ) ).append(sep) + .append( format( getHiTime() ) ).append(sep) + .append( format( getLoTime() ) ).append(sep) + .append( format( getCount() ) ) + .toString(); + } + + public void configure(Type type, Properties params, Dialect d) { + sep = PropertiesHelper.getString("separator", params, ""); + } + + public static void main( String[] args ) throws Exception { + Properties props = new Properties(); + props.setProperty("separator", "/"); + IdentifierGenerator gen = new UUIDHexGenerator(); + ( (Configurable) gen ).configure(Hibernate.STRING, props, null); + IdentifierGenerator gen2 = new UUIDHexGenerator(); + ( (Configurable) gen2 ).configure(Hibernate.STRING, props, null); + + for ( int i=0; i<10; i++) { + String id = (String) gen.generate(null, null); + System.out.println(id); + String id2 = (String) gen2.generate(null, null); + System.out.println(id2); + } + + } + +} diff --git a/src/org/hibernate/id/enhanced/AccessCallback.java b/src/org/hibernate/id/enhanced/AccessCallback.java new file mode 100644 index 0000000000..509564fe29 --- /dev/null +++ b/src/org/hibernate/id/enhanced/AccessCallback.java @@ -0,0 +1,16 @@ +package org.hibernate.id.enhanced; + +/** + * Contract for providing callback access to a {@link DatabaseStructure}, + * typically from the {@link Optimizer}. + * + * @author Steve Ebersole + */ +public interface AccessCallback { + /** + * Retrieve the next value from the underlying source. + * + * @return The next value. + */ + public long getNextValue(); +} diff --git a/src/org/hibernate/id/enhanced/DatabaseStructure.java b/src/org/hibernate/id/enhanced/DatabaseStructure.java new file mode 100644 index 0000000000..3898bd05c5 --- /dev/null +++ b/src/org/hibernate/id/enhanced/DatabaseStructure.java @@ -0,0 +1,61 @@ +package org.hibernate.id.enhanced; + +import org.hibernate.engine.SessionImplementor; +import org.hibernate.dialect.Dialect; + +/** + * Encapsulates definition of the underlying data structure backing a + * sequence-style generator. + * + * @author Steve Ebersole + */ +public interface DatabaseStructure { + /** + * The name of the database structure (table or sequence). + * @return The structure name. + */ + public String getName(); + + /** + * How many times has this structure been accessed through this reference? + * @return The number of accesses. 
+ */ + public int getTimesAccessed(); + + /** + * The configured increment size + * @return The configured increment size + */ + public int getIncrementSize(); + + /** + * A callback to be able to get the next value from the underlying + * structure as needed. + * + * @param session The session. + * @return The next value. + */ + public AccessCallback buildCallback(SessionImplementor session); + + /** + * Prepare this structure for use. Called sometime after instantiation, + * but before first use. + * + * @param optimizer The optimizer being applied to the generator. + */ + public void prepare(Optimizer optimizer); + + /** + * Commands needed to create the underlying structures. + * @param dialect The database dialect being used. + * @return The creation commands. + */ + public String[] sqlCreateStrings(Dialect dialect); + + /** + * Commands needed to drop the underlying structures. + * @param dialect The database dialect being used. + * @return The drop commands. + */ + public String[] sqlDropStrings(Dialect dialect); +} \ No newline at end of file diff --git a/src/org/hibernate/id/enhanced/Optimizer.java b/src/org/hibernate/id/enhanced/Optimizer.java new file mode 100644 index 0000000000..93a2f18dff --- /dev/null +++ b/src/org/hibernate/id/enhanced/Optimizer.java @@ -0,0 +1,53 @@ +package org.hibernate.id.enhanced; + +import java.io.Serializable; + +/** + * Performs optimization on an optimizable identifier generator. Typically + * this optimization takes the form of trying to ensure we do not have to + * hit the database on each and every request to get an identifier value. + *

+ * Optimizers work on constructor injection. They should provide
+ * a constructor with the following arguments:
+ * <ol>
+ * <li>java.lang.Class - The return type for the generated values</li>
+ * <li>int - The increment size</li>
+ * </ol>
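+ * A hedged sketch of a custom optimizer honoring that contract; the class
+ * name is illustrative, and OptimizerFactory.OptimizerSupport (defined
+ * later in this patch) supplies the matching constructor plumbing:
+ * <pre>
+ * public class MyOptimizer extends OptimizerFactory.OptimizerSupport {
+ *     private long last = -1;
+ *     public MyOptimizer(Class returnClass, int incrementSize) {
+ *         super( returnClass, incrementSize );
+ *     }
+ *     public Serializable generate(AccessCallback callback) {
+ *         last = callback.getNextValue();  // hit the source every time
+ *         return make( last );
+ *     }
+ *     public long getLastSourceValue() { return last; }
+ *     public boolean applyIncrementSizeToSourceValues() { return false; }
+ * }
+ * </pre>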
    + * + * @author Steve Ebersole + */ +public interface Optimizer { + /** + * Generate an identifier value accounting for this specific optimization. + * + * @param callback Callback to access the underlying value source. + * @return The generated identifier value. + */ + public Serializable generate(AccessCallback callback); + + /** + * A common means to access the last value obtained from the underlying + * source. This is intended for testing purposes, since accessing the + * unerlying database source directly is much more difficult. + * + * @return The last value we obtained from the underlying source; + * -1 indicates we have not yet consulted with the source. + */ + public long getLastSourceValue(); + + /** + * Retrieves the defined increment size. + * + * @return The increment size. + */ + public int getIncrementSize(); + + /** + * Are increments to be applied to the values stored in the underlying + * value source? + * + * @return True if the values in the source are to be incremented + * according to the defined increment size; false otherwise, in which + * case the increment is totally an in memory construct. + */ + public boolean applyIncrementSizeToSourceValues(); +} diff --git a/src/org/hibernate/id/enhanced/OptimizerFactory.java b/src/org/hibernate/id/enhanced/OptimizerFactory.java new file mode 100644 index 0000000000..f3e468605f --- /dev/null +++ b/src/org/hibernate/id/enhanced/OptimizerFactory.java @@ -0,0 +1,202 @@ +package org.hibernate.id.enhanced; + +import java.io.Serializable; +import java.lang.reflect.Constructor; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.HibernateException; +import org.hibernate.util.ReflectHelper; +import org.hibernate.id.IdentifierGeneratorFactory; + +/** + * Factory for {@link Optimizer} instances. + * + * @author Steve Ebersole + */ +public class OptimizerFactory { + private static final Log log = LogFactory.getLog( OptimizerFactory.class ); + + public static final String NONE = "none"; + public static final String HILO = "hilo"; + public static final String POOL = "pooled"; + + private static Class[] CTOR_SIG = new Class[] { Class.class, int.class }; + + public static Optimizer buildOptimizer(String type, Class returnClass, int incrementSize) { + String optimizerClassName; + if ( NONE.equals( type ) ) { + optimizerClassName = NoopOptimizer.class.getName(); + } + else if ( HILO.equals( type ) ) { + optimizerClassName = HiLoOptimizer.class.getName(); + } + else if ( POOL.equals( type ) ) { + optimizerClassName = PooledOptimizer.class.getName(); + } + else { + optimizerClassName = type; + } + + try { + Class optimizerClass = ReflectHelper.classForName( optimizerClassName ); + Constructor ctor = optimizerClass.getConstructor( CTOR_SIG ); + return ( Optimizer ) ctor.newInstance( new Object[] { returnClass, new Integer( incrementSize ) } ); + } + catch( Throwable ignore ) { + // intentionally empty + } + + // the default... 
+ return new NoopOptimizer( returnClass, incrementSize ); + } + + public static abstract class OptimizerSupport implements Optimizer { + protected final Class returnClass; + protected final int incrementSize; + + protected OptimizerSupport(Class returnClass, int incrementSize) { + if ( returnClass == null ) { + throw new HibernateException( "return class is required" ); + } + this.returnClass = returnClass; + this.incrementSize = incrementSize; + } + + protected Serializable make(long value) { + return IdentifierGeneratorFactory.createNumber( value, returnClass ); + } + + public Class getReturnClass() { + return returnClass; + } + + public int getIncrementSize() { + return incrementSize; + } + } + + public static class NoopOptimizer extends OptimizerSupport { + private long lastSourceValue = -1; + + public NoopOptimizer(Class returnClass, int incrementSize) { + super( returnClass, incrementSize ); + } + + public Serializable generate(AccessCallback callback) { + if ( lastSourceValue == -1 ) { + while( lastSourceValue <= 0 ) { + lastSourceValue = callback.getNextValue(); + } + } + else { + lastSourceValue = callback.getNextValue(); + } + return make( lastSourceValue ); + } + + public long getLastSourceValue() { + return lastSourceValue; + } + + public boolean applyIncrementSizeToSourceValues() { + return false; + } + } + + public static class HiLoOptimizer extends OptimizerSupport { + private long lastSourceValue = -1; + private long value; + private long hiValue; + + public HiLoOptimizer(Class returnClass, int incrementSize) { + super( returnClass, incrementSize ); + if ( incrementSize < 1 ) { + throw new HibernateException( "increment size cannot be less than 1" ); + } + if ( log.isTraceEnabled() ) { + log.trace( "creating hilo optimizer with [incrementSize=" + incrementSize + "; returnClass=" + returnClass.getName() + "]" ); + } + } + + public Serializable generate(AccessCallback callback) { + if ( lastSourceValue < 0 ) { + lastSourceValue = callback.getNextValue(); + while ( lastSourceValue <= 0 ) { + lastSourceValue = callback.getNextValue(); + } + hiValue = ( lastSourceValue * incrementSize ) + 1; + value = hiValue - incrementSize; + } + else if ( value >= hiValue ) { + lastSourceValue = callback.getNextValue(); + hiValue = ( lastSourceValue * incrementSize ) + 1; + } + return make( value++ ); + } + + + public long getLastSourceValue() { + return lastSourceValue; + } + + public boolean applyIncrementSizeToSourceValues() { + return false; + } + + public long getLastValue() { + return value - 1; + } + + public long getHiValue() { + return hiValue; + } + } + + public static class PooledOptimizer extends OptimizerSupport { + private long value; + private long hiValue = -1; + + public PooledOptimizer(Class returnClass, int incrementSize) { + super( returnClass, incrementSize ); + if ( incrementSize < 1 ) { + throw new HibernateException( "increment size cannot be less than 1" ); + } + if ( log.isTraceEnabled() ) { + log.trace( "creating pooled optimizer with [incrementSize=" + incrementSize + "; returnClass=" + returnClass.getName() + "]" ); + } + } + + public Serializable generate(AccessCallback callback) { + if ( hiValue < 0 ) { + value = callback.getNextValue(); + if ( value < 1 ) { + // unfortunately not really safe to normalize this + // to 1 as an initial value like we do the others + // because we would not be able to control this if + // we are using a sequence... 
+ log.info( "pooled optimizer source reported [" + value + "] as the initial value; use of 1 or greater highly recommended" ); + } + hiValue = callback.getNextValue(); + } + else if ( value >= hiValue ) { + hiValue = callback.getNextValue(); + value = hiValue - incrementSize; + } + return make( value++ ); + } + + public long getLastSourceValue() { + return hiValue; + } + + public boolean applyIncrementSizeToSourceValues() { + return true; + } + + public long getLastValue() { + return value - 1; + } + } +} diff --git a/src/org/hibernate/id/enhanced/SequenceStructure.java b/src/org/hibernate/id/enhanced/SequenceStructure.java new file mode 100644 index 0000000000..2c4408147b --- /dev/null +++ b/src/org/hibernate/id/enhanced/SequenceStructure.java @@ -0,0 +1,103 @@ +package org.hibernate.id.enhanced; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.HibernateException; + +/** + * Describes a sequence. + * + * @author Steve Ebersole + */ +public class SequenceStructure implements DatabaseStructure { + private static final Log log = LogFactory.getLog( SequenceStructure.class ); + + private final String sequenceName; + private final int initialValue; + private final int incrementSize; + private final String sql; + private boolean applyIncrementSizeToSourceValues; + private int accessCounter; + + public SequenceStructure(Dialect dialect, String sequenceName, int initialValue, int incrementSize) { + this.sequenceName = sequenceName; + this.initialValue = initialValue; + this.incrementSize = incrementSize; + sql = dialect.getSequenceNextValString( sequenceName ); + } + + public String getName() { + return sequenceName; + } + + public int getIncrementSize() { + return incrementSize; + } + + public int getTimesAccessed() { + return accessCounter; + } + + public AccessCallback buildCallback(final SessionImplementor session) { + return new AccessCallback() { + public long getNextValue() { + accessCounter++; + try { + PreparedStatement st = session.getBatcher().prepareSelectStatement( sql ); + try { + ResultSet rs = st.executeQuery(); + try { + rs.next(); + long result = rs.getLong( 1 ); + if ( log.isDebugEnabled() ) { + log.debug("Sequence identifier generated: " + result); + } + return result; + } + finally { + try { + rs.close(); + } + catch( Throwable ignore ) { + // intentionally empty + } + } + } + finally { + session.getBatcher().closeStatement( st ); + } + + } + catch ( SQLException sqle) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not get next sequence value", + sql + ); + } + } + }; + } + + public void prepare(Optimizer optimizer) { + applyIncrementSizeToSourceValues = optimizer.applyIncrementSizeToSourceValues(); + } + + public String[] sqlCreateStrings(Dialect dialect) throws HibernateException { + int sourceIncrementSize = applyIncrementSizeToSourceValues ? 
incrementSize : 1; + return dialect.getCreateSequenceStrings( sequenceName, initialValue, sourceIncrementSize ); + } + + public String[] sqlDropStrings(Dialect dialect) throws HibernateException { + return dialect.getDropSequenceStrings( sequenceName ); + } +} diff --git a/src/org/hibernate/id/enhanced/SequenceStyleGenerator.java b/src/org/hibernate/id/enhanced/SequenceStyleGenerator.java new file mode 100644 index 0000000000..30f315989a --- /dev/null +++ b/src/org/hibernate/id/enhanced/SequenceStyleGenerator.java @@ -0,0 +1,175 @@ +package org.hibernate.id.enhanced; + +import java.util.Properties; +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.id.PersistentIdentifierGenerator; +import org.hibernate.id.Configurable; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.mapping.Table; +import org.hibernate.util.PropertiesHelper; +import org.hibernate.type.Type; +import org.hibernate.dialect.Dialect; + +/** + * Generates identifier values based on an sequence-style database structure. + * Variations range from actually using a sequence to using a table to mimic + * a sequence. These variations are encapsulated by the {@link DatabaseStructure} + * interface internally. + *

+ * General configuration parameters:
+ * <table>
+ * <tr><th>NAME</th><th>DEFAULT</th><th>DESCRIPTION</th></tr>
+ * <tr><td>{@link #SEQUENCE_PARAM}</td><td>{@link #DEF_SEQUENCE_NAME}</td><td>The name of the sequence/table to use to store/retrieve values</td></tr>
+ * <tr><td>{@link #INITIAL_PARAM}</td><td>{@link #DEFAULT_INITIAL_VALUE}</td><td>The initial value to be stored for the given segment; the effect in terms of storage varies based on {@link Optimizer} and {@link DatabaseStructure}</td></tr>
+ * <tr><td>{@link #INCREMENT_PARAM}</td><td>{@link #DEFAULT_INCREMENT_SIZE}</td><td>The increment size for the underlying segment; the effect in terms of storage varies based on {@link Optimizer} and {@link DatabaseStructure}</td></tr>
+ * <tr><td>{@link #OPT_PARAM}</td><td><i>depends on defined increment size</i></td><td>Allows explicit definition of which optimization strategy to use</td></tr>
+ * <tr><td>{@link #FORCE_TBL_PARAM}</td><td>false</td><td>Forces use of a table as the backing structure even when the dialect supports sequences</td></tr>
+ * </table>
+ *

+ * Configuration parameters used specifically when the underlying structure is a table:
+ * <table>
+ * <tr><th>NAME</th><th>DEFAULT</th><th>DESCRIPTION</th></tr>
+ * <tr><td>{@link #VALUE_COLUMN_PARAM}</td><td>{@link #DEF_VALUE_COLUMN}</td><td>The name of the column which holds the sequence value for the given segment</td></tr>
+ * </table>
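+ *
+ * A hedged configuration sketch; the sequence name and dialect instance are
+ * illustrative, while the parameter constants come from this class:
+ * <pre>
+ * Properties params = new Properties();
+ * params.setProperty( SequenceStyleGenerator.SEQUENCE_PARAM, "my_entity_seq" );
+ * params.setProperty( SequenceStyleGenerator.INCREMENT_PARAM, "20" );
+ * params.setProperty( SequenceStyleGenerator.OPT_PARAM, OptimizerFactory.POOL );
+ * SequenceStyleGenerator generator = new SequenceStyleGenerator();
+ * generator.configure( Hibernate.LONG, params, dialect );
+ * </pre>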
    + * + * @author Steve Ebersole + */ +public class SequenceStyleGenerator implements PersistentIdentifierGenerator, Configurable { + private static final Log log = LogFactory.getLog( SequenceStyleGenerator.class ); + + // general purpose parameters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + public static final String SEQUENCE_PARAM = "sequence_name"; + public static final String DEF_SEQUENCE_NAME = "hibernate_sequence"; + + public static final String INITIAL_PARAM = "initial_value"; + public static final int DEFAULT_INITIAL_VALUE = 1; + + public static final String INCREMENT_PARAM = "increment_size"; + public static final int DEFAULT_INCREMENT_SIZE = 1; + + public static final String OPT_PARAM = "optimizer"; + + public static final String FORCE_TBL_PARAM = "force_table_use"; + + + // table-specific parameters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + public static final String VALUE_COLUMN_PARAM = "value_column"; + public static final String DEF_VALUE_COLUMN = "next_val"; + + + // state ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + private DatabaseStructure databaseStructure; + private Optimizer optimizer; + private Type identifierType; + + public DatabaseStructure getDatabaseStructure() { + return databaseStructure; + } + + public Optimizer getOptimizer() { + return optimizer; + } + + public Type getIdentifierType() { + return identifierType; + } + + + // Configurable implementation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void configure(Type type, Properties params, Dialect dialect) throws MappingException { + identifierType = type; + boolean forceTableUse = PropertiesHelper.getBoolean( FORCE_TBL_PARAM, params, false ); + + String sequenceName = PropertiesHelper.getString( SEQUENCE_PARAM, params, DEF_SEQUENCE_NAME ); + if ( sequenceName.indexOf( '.' ) < 0 ) { + String schemaName = params.getProperty( SCHEMA ); + String catalogName = params.getProperty( CATALOG ); + sequenceName = Table.qualify( catalogName, schemaName, sequenceName ); + } + int initialValue = PropertiesHelper.getInt( INITIAL_PARAM, params, DEFAULT_INITIAL_VALUE ); + int incrementSize = PropertiesHelper.getInt( INCREMENT_PARAM, params, DEFAULT_INCREMENT_SIZE ); + + String valueColumnName = PropertiesHelper.getString( VALUE_COLUMN_PARAM, params, DEF_VALUE_COLUMN ); + + String defOptStrategy = incrementSize <= 1 ? OptimizerFactory.NONE : OptimizerFactory.POOL; + String optimizationStrategy = PropertiesHelper.getString( OPT_PARAM, params, defOptStrategy ); + if ( OptimizerFactory.NONE.equals( optimizationStrategy ) && incrementSize > 1 ) { + log.warn( "config specified explicit optimizer of [" + OptimizerFactory.NONE + "], but [" + INCREMENT_PARAM + "=" + incrementSize + "; honoring optimizer setting" ); + incrementSize = 1; + } + if ( dialect.supportsSequences() && !forceTableUse ) { + if ( OptimizerFactory.POOL.equals( optimizationStrategy ) && !dialect.supportsPooledSequences() ) { + // TODO : may even be better to fall back to a pooled table strategy here so that the db stored values remain consistent... 
+ optimizationStrategy = OptimizerFactory.HILO; + } + databaseStructure = new SequenceStructure( dialect, sequenceName, initialValue, incrementSize ); + } + else { + databaseStructure = new TableStructure( dialect, sequenceName, valueColumnName, initialValue, incrementSize ); + } + + optimizer = OptimizerFactory.buildOptimizer( optimizationStrategy, identifierType.getReturnedClass(), incrementSize ); + databaseStructure.prepare( optimizer ); + } + + + // IdentifierGenerator implementation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Serializable generate(SessionImplementor session, Object object) throws HibernateException { + return optimizer.generate( databaseStructure.buildCallback( session ) ); + } + + + // PersistentIdentifierGenerator implementation ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Object generatorKey() { + return databaseStructure.getName(); + } + + public String[] sqlCreateStrings(Dialect dialect) throws HibernateException { + return databaseStructure.sqlCreateStrings( dialect ); + } + + public String[] sqlDropStrings(Dialect dialect) throws HibernateException { + return databaseStructure.sqlDropStrings( dialect ); + } + +} diff --git a/src/org/hibernate/id/enhanced/TableGenerator.java b/src/org/hibernate/id/enhanced/TableGenerator.java new file mode 100644 index 0000000000..cce5d4dd30 --- /dev/null +++ b/src/org/hibernate/id/enhanced/TableGenerator.java @@ -0,0 +1,317 @@ +package org.hibernate.id.enhanced; + +import java.sql.Types; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.util.Properties; +import java.util.HashMap; +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.engine.TransactionHelper; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.id.PersistentIdentifierGenerator; +import org.hibernate.id.Configurable; +import org.hibernate.type.Type; +import org.hibernate.dialect.Dialect; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.LockMode; +import org.hibernate.mapping.Table; +import org.hibernate.util.PropertiesHelper; +import org.hibernate.util.StringHelper; +import org.hibernate.util.CollectionHelper; + +/** + * A "segmented" version of the enhanced table generator. The term "segmented" + * refers to the fact that this table can hold multiple value generators, + * segmented by a key. + *

+ * Configuration parameters:
+ * <table>
+ * <tr><th>NAME</th><th>DEFAULT</th><th>DESCRIPTION</th></tr>
+ * <tr><td>{@link #TABLE_PARAM}</td><td>{@link #DEF_TABLE}</td><td>The name of the table to use to store/retrieve values</td></tr>
+ * <tr><td>{@link #VALUE_COLUMN_PARAM}</td><td>{@link #DEF_VALUE_COLUMN}</td><td>The name of the column which holds the sequence value for the given segment</td></tr>
+ * <tr><td>{@link #SEGMENT_COLUMN_PARAM}</td><td>{@link #DEF_SEGMENT_COLUMN}</td><td>The name of the column which holds the segment key</td></tr>
+ * <tr><td>{@link #SEGMENT_VALUE_PARAM}</td><td>{@link #DEF_SEGMENT_VALUE}</td><td>The value indicating which segment is used by this generator; refers to values in the {@link #SEGMENT_COLUMN_PARAM} column</td></tr>
+ * <tr><td>{@link #SEGMENT_LENGTH_PARAM}</td><td>{@link #DEF_SEGMENT_LENGTH}</td><td>The data length of the {@link #SEGMENT_COLUMN_PARAM} column; used for schema creation</td></tr>
+ * <tr><td>{@link #INITIAL_PARAM}</td><td>{@link #DEFAULT_INITIAL_VALUE}</td><td>The initial value to be stored for the given segment</td></tr>
+ * <tr><td>{@link #INCREMENT_PARAM}</td><td>{@link #DEFAULT_INCREMENT_SIZE}</td><td>The increment size for the underlying segment; see the discussion on {@link Optimizer} for more details</td></tr>
+ * <tr><td>{@link #OPT_PARAM}</td><td><i>depends on defined increment size</i></td><td>Allows explicit definition of which optimization strategy to use</td></tr>
+ * </table>
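+ *
+ * A hedged configuration sketch; the segment value and dialect instance are
+ * illustrative, while the parameter constants come from this class:
+ * <pre>
+ * Properties params = new Properties();
+ * params.setProperty( TableGenerator.SEGMENT_VALUE_PARAM, "customer" );
+ * params.setProperty( TableGenerator.INCREMENT_PARAM, "10" );
+ * TableGenerator generator = new TableGenerator();
+ * generator.configure( Hibernate.LONG, params, dialect );
+ * </pre>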
    + * + * @author Steve Ebersole + */ +public class TableGenerator extends TransactionHelper implements PersistentIdentifierGenerator, Configurable { + private static final Log log = LogFactory.getLog( TableGenerator.class ); + + public static final String TABLE_PARAM = "table_name"; + public static final String DEF_TABLE = "hibernate_sequences"; + + public static final String VALUE_COLUMN_PARAM = "value_column_name"; + public static final String DEF_VALUE_COLUMN = "next_val"; + + public static final String SEGMENT_COLUMN_PARAM = "segment_column_name"; + public static final String DEF_SEGMENT_COLUMN = "sequence_name"; + + public static final String SEGMENT_VALUE_PARAM = "segment_value"; + public static final String DEF_SEGMENT_VALUE = "default"; + + public static final String SEGMENT_LENGTH_PARAM = "segment_value_length"; + public static final int DEF_SEGMENT_LENGTH = 255; + + public static final String INITIAL_PARAM = "initial_value"; + public static final int DEFAULT_INITIAL_VALUE = 1; + + public static final String INCREMENT_PARAM = "increment_size"; + public static final int DEFAULT_INCREMENT_SIZE = 1; + + public static final String OPT_PARAM = "optimizer"; + + + private String tableName; + private String valueColumnName; + private String segmentColumnName; + private String segmentValue; + private int segmentValueLength; + private int initialValue; + private int incrementSize; + + private Type identifierType; + + private String query; + private String insert; + private String update; + + private Optimizer optimizer; + private long accessCount = 0; + + public String getTableName() { + return tableName; + } + + public String getSegmentColumnName() { + return segmentColumnName; + } + + public String getSegmentValue() { + return segmentValue; + } + + public int getSegmentValueLength() { + return segmentValueLength; + } + + public String getValueColumnName() { + return valueColumnName; + } + + public Type getIdentifierType() { + return identifierType; + } + + public int getInitialValue() { + return initialValue; + } + + public int getIncrementSize() { + return incrementSize; + } + + public Optimizer getOptimizer() { + return optimizer; + } + + public long getTableAccessCount() { + return accessCount; + } + + public void configure(Type type, Properties params, Dialect dialect) throws MappingException { + tableName = PropertiesHelper.getString( TABLE_PARAM, params, DEF_TABLE ); + if ( tableName.indexOf( '.' ) < 0 ) { + String schemaName = params.getProperty( SCHEMA ); + String catalogName = params.getProperty( CATALOG ); + tableName = Table.qualify( catalogName, schemaName, tableName ); + } + + segmentColumnName = PropertiesHelper.getString( SEGMENT_COLUMN_PARAM, params, DEF_SEGMENT_COLUMN ); + segmentValue = params.getProperty( SEGMENT_VALUE_PARAM ); + if ( StringHelper.isEmpty( segmentValue ) ) { + log.debug( "explicit segment value for id generator [" + tableName + '.' + segmentColumnName + "] suggested; using default [" + DEF_SEGMENT_VALUE + "]" ); + segmentValue = DEF_SEGMENT_VALUE; + } + segmentValueLength = PropertiesHelper.getInt( SEGMENT_LENGTH_PARAM, params, DEF_SEGMENT_LENGTH ); + valueColumnName = PropertiesHelper.getString( VALUE_COLUMN_PARAM, params, DEF_VALUE_COLUMN ); + initialValue = PropertiesHelper.getInt( INITIAL_PARAM, params, DEFAULT_INITIAL_VALUE ); + incrementSize = PropertiesHelper.getInt( INCREMENT_PARAM, params, DEFAULT_INCREMENT_SIZE ); + identifierType = type; + + String query = "select " + valueColumnName + + " from " + tableName + " tbl" + + " where tbl." 
+ segmentColumnName + "=?"; + HashMap lockMap = new HashMap(); + lockMap.put( "tbl", LockMode.UPGRADE ); + this.query = dialect.applyLocksToSql( query, lockMap, CollectionHelper.EMPTY_MAP ); + + update = "update " + tableName + + " set " + valueColumnName + "=? " + + " where " + valueColumnName + "=? and " + segmentColumnName + "=?"; + + insert = "insert into " + tableName + " (" + segmentColumnName + ", " + valueColumnName + ") " + " values (?,?)"; + + String defOptStrategy = incrementSize <= 1 ? OptimizerFactory.NONE : OptimizerFactory.POOL; + String optimizationStrategy = PropertiesHelper.getString( OPT_PARAM, params, defOptStrategy ); + optimizer = OptimizerFactory.buildOptimizer( optimizationStrategy, identifierType.getReturnedClass(), incrementSize ); + } + + public synchronized Serializable generate(final SessionImplementor session, Object obj) { + return optimizer.generate( + new AccessCallback() { + public long getNextValue() { + return ( ( Number ) doWorkInNewTransaction( session ) ).longValue(); + } + } + ); + } + + public Serializable doWorkInCurrentTransaction(Connection conn, String sql) throws SQLException { + int result; + int rows; + do { + sql = query; + SQL.debug( sql ); + PreparedStatement queryPS = conn.prepareStatement( query ); + try { + queryPS.setString( 1, segmentValue ); + ResultSet queryRS = queryPS.executeQuery(); + if ( !queryRS.next() ) { + PreparedStatement insertPS = null; + try { + result = initialValue; + sql = insert; + SQL.debug( sql ); + insertPS = conn.prepareStatement( insert ); + insertPS.setString( 1, segmentValue ); + insertPS.setLong( 2, result ); + insertPS.execute(); + } + finally { + if ( insertPS != null ) { + insertPS.close(); + } + } + } + else { + result = queryRS.getInt( 1 ); + } + queryRS.close(); + } + catch ( SQLException sqle ) { + log.error( "could not read or init a hi value", sqle ); + throw sqle; + } + finally { + queryPS.close(); + } + + sql = update; + SQL.debug( sql ); + PreparedStatement updatePS = conn.prepareStatement( update ); + try { + long newValue = optimizer.applyIncrementSizeToSourceValues() + ? 
result + incrementSize : result + 1; + updatePS.setLong( 1, newValue ); + updatePS.setLong( 2, result ); + updatePS.setString( 3, segmentValue ); + rows = updatePS.executeUpdate(); + } + catch ( SQLException sqle ) { + log.error( "could not update hi value in: " + tableName, sqle ); + throw sqle; + } + finally { + updatePS.close(); + } + } + while ( rows == 0 ); + + accessCount++; + + return new Integer( result ); + } + + public String[] sqlCreateStrings(Dialect dialect) throws HibernateException { + return new String[] { + new StringBuffer() + .append( dialect.getCreateTableString() ) + .append( ' ' ) + .append( tableName ) + .append( " ( " ) + .append( segmentColumnName ) + .append( ' ' ) + .append( dialect.getTypeName( Types.VARCHAR, segmentValueLength, 0, 0 ) ) + .append( ", " ) + .append( valueColumnName ) + .append( ' ' ) + .append( dialect.getTypeName( Types.BIGINT ) ) + .append( " ) " ) + .toString() + }; + } + + public String[] sqlDropStrings(Dialect dialect) throws HibernateException { + StringBuffer sqlDropString = new StringBuffer().append( "drop table " ); + if ( dialect.supportsIfExistsBeforeTableName() ) { + sqlDropString.append( "if exists " ); + } + sqlDropString.append( tableName ).append( dialect.getCascadeConstraintsString() ); + if ( dialect.supportsIfExistsAfterTableName() ) { + sqlDropString.append( " if exists" ); + } + return new String[] { sqlDropString.toString() }; + } + + public Object generatorKey() { + return tableName; + } +} diff --git a/src/org/hibernate/id/enhanced/TableStructure.java b/src/org/hibernate/id/enhanced/TableStructure.java new file mode 100644 index 0000000000..40b3edb887 --- /dev/null +++ b/src/org/hibernate/id/enhanced/TableStructure.java @@ -0,0 +1,144 @@ +package org.hibernate.id.enhanced; + +import java.sql.Types; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.io.Serializable; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.hibernate.dialect.Dialect; +import org.hibernate.LockMode; +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.TransactionHelper; +import org.hibernate.id.IdentifierGenerationException; + +/** + * Describes a table used to mimic sequence behavior + * + * @author Steve Ebersole + */ +public class TableStructure extends TransactionHelper implements DatabaseStructure { + private static final Log log = LogFactory.getLog( TableStructure.class ); + private static final Log SQL_LOG = LogFactory.getLog( "org.hibernate.SQL" ); + + private final String tableName; + private final String valueColumnName; + private final int initialValue; + private final int incrementSize; + private final String select; + private final String update; + private boolean applyIncrementSizeToSourceValues; + private int accessCounter; + + public TableStructure(Dialect dialect, String tableName, String valueColumnName, int initialValue, int incrementSize) { + this.tableName = tableName; + this.initialValue = initialValue; + this.incrementSize = incrementSize; + this.valueColumnName = valueColumnName; + + select = "select " + valueColumnName + " id_val" + + " from " + dialect.appendLockHint( LockMode.UPGRADE, tableName ) + + dialect.getForUpdateString(); + + update = "update " + tableName + + " set " + valueColumnName + "= ?" 
+ + " where " + valueColumnName + "=?"; + } + + public String getName() { + return tableName; + } + + public int getIncrementSize() { + return incrementSize; + } + + public int getTimesAccessed() { + return accessCounter; + } + + public void prepare(Optimizer optimizer) { + applyIncrementSizeToSourceValues = optimizer.applyIncrementSizeToSourceValues(); + } + + public AccessCallback buildCallback(final SessionImplementor session) { + return new AccessCallback() { + public long getNextValue() { + return ( ( Number ) doWorkInNewTransaction( session ) ).longValue(); + } + }; + } + + public String[] sqlCreateStrings(Dialect dialect) throws HibernateException { + return new String[] { + "create table " + tableName + " ( " + valueColumnName + " " + dialect.getTypeName( Types.BIGINT ) + " )", + "insert into " + tableName + " values ( " + initialValue + " )" + }; + } + + public String[] sqlDropStrings(Dialect dialect) throws HibernateException { + StringBuffer sqlDropString = new StringBuffer().append( "drop table " ); + if ( dialect.supportsIfExistsBeforeTableName() ) { + sqlDropString.append( "if exists " ); + } + sqlDropString.append( tableName ).append( dialect.getCascadeConstraintsString() ); + if ( dialect.supportsIfExistsAfterTableName() ) { + sqlDropString.append( " if exists" ); + } + return new String[] { sqlDropString.toString() }; + } + + protected Serializable doWorkInCurrentTransaction(Connection conn, String sql) throws SQLException { + long result; + int rows; + do { + sql = select; + SQL_LOG.debug( sql ); + PreparedStatement qps = conn.prepareStatement( select ); + try { + ResultSet rs = qps.executeQuery(); + if ( !rs.next() ) { + String err = "could not read a hi value - you need to populate the table: " + tableName; + log.error( err ); + throw new IdentifierGenerationException( err ); + } + result = rs.getLong( 1 ); + rs.close(); + } + catch ( SQLException sqle ) { + log.error( "could not read a hi value", sqle ); + throw sqle; + } + finally { + qps.close(); + } + + sql = update; + SQL_LOG.debug( sql ); + PreparedStatement ups = conn.prepareStatement( update ); + try { + int increment = applyIncrementSizeToSourceValues ? incrementSize : 1; + ups.setLong( 1, result + increment ); + ups.setLong( 2, result ); + rows = ups.executeUpdate(); + } + catch ( SQLException sqle ) { + log.error( "could not update hi value in: " + tableName, sqle ); + throw sqle; + } + finally { + ups.close(); + } + } while ( rows == 0 ); + + accessCounter++; + + return new Long( result ); + } + +} diff --git a/src/org/hibernate/id/insert/AbstractReturningDelegate.java b/src/org/hibernate/id/insert/AbstractReturningDelegate.java new file mode 100644 index 0000000000..f8f837b2b1 --- /dev/null +++ b/src/org/hibernate/id/insert/AbstractReturningDelegate.java @@ -0,0 +1,60 @@ +package org.hibernate.id.insert; + +import org.hibernate.id.PostInsertIdentityPersister; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.pretty.MessageHelper; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.SQLException; + +/** + * Abstract InsertGeneratedIdentifierDelegate implementation where the + * underlying strategy causes the enerated identitifer to be returned as an + * effect of performing the insert statement. Thus, there is no need for an + * additional sql statement to determine the generated identitifer. 
+ * + * @author Steve Ebersole + */ +public abstract class AbstractReturningDelegate implements InsertGeneratedIdentifierDelegate { + private final PostInsertIdentityPersister persister; + + public AbstractReturningDelegate(PostInsertIdentityPersister persister) { + this.persister = persister; + } + + public final Serializable performInsert(String insertSQL, SessionImplementor session, Binder binder) { + try { + // prepare and execute the insert + PreparedStatement insert = prepare( insertSQL, session ); + try { + binder.bindValues( insert ); + return executeAndExtract( insert ); + } + finally { + releaseStatement( insert, session ); + } + } + catch ( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not insert: " + MessageHelper.infoString( persister ), + insertSQL + ); + } + } + + protected PostInsertIdentityPersister getPersister() { + return persister; + } + + protected abstract PreparedStatement prepare(String insertSQL, SessionImplementor session) throws SQLException; + + protected abstract Serializable executeAndExtract(PreparedStatement insert) throws SQLException; + + protected void releaseStatement(PreparedStatement insert, SessionImplementor session) throws SQLException { + session.getBatcher().closeStatement( insert ); + } +} diff --git a/src/org/hibernate/id/insert/AbstractSelectingDelegate.java b/src/org/hibernate/id/insert/AbstractSelectingDelegate.java new file mode 100644 index 0000000000..b8f1f4ba3a --- /dev/null +++ b/src/org/hibernate/id/insert/AbstractSelectingDelegate.java @@ -0,0 +1,113 @@ +package org.hibernate.id.insert; + +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.id.PostInsertIdentityPersister; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.ResultSet; +import java.io.Serializable; + +/** + * Abstract InsertGeneratedIdentifierDelegate implementation where the + * underlying strategy requires an subsequent select after the insert + * to determine the generated identifier. 
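 + * <p/>
 + * A hypothetical subclass sketch (<tt>Customer</tt> and the column names are
 + * made up; prepareIdentifierGeneratingInsert omitted), re-selecting the key
 + * by a unique property:
 + * <pre>
 + * protected String getSelectSQL() {
 + *     return "select cust_id from customers where cust_name = ?";
 + * }
 + * protected void bindParameters(SessionImplementor session, PreparedStatement ps, Object entity) throws SQLException {
 + *     ps.setString( 1, ( (Customer) entity ).getName() );
 + * }
 + * protected Serializable getResult(SessionImplementor session, ResultSet rs, Object entity) throws SQLException {
 + *     rs.next();
 + *     return new Long( rs.getLong( 1 ) );
 + * }
 + * </pre>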
+ * + * @author Steve Ebersole + */ +public abstract class AbstractSelectingDelegate implements InsertGeneratedIdentifierDelegate { + private final PostInsertIdentityPersister persister; + + protected AbstractSelectingDelegate(PostInsertIdentityPersister persister) { + this.persister = persister; + } + + public final Serializable performInsert(String insertSQL, SessionImplementor session, Binder binder) { + try { + // prepare and execute the insert + PreparedStatement insert = session.getBatcher().prepareStatement( insertSQL, false ); + try { + binder.bindValues( insert ); + insert.executeUpdate(); + } + finally { + session.getBatcher().closeStatement( insert ); + } + } + catch ( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not insert: " + MessageHelper.infoString( persister ), + insertSQL + ); + } + + final String selectSQL = getSelectSQL(); + + try { + //fetch the generated id in a separate query + PreparedStatement idSelect = session.getBatcher().prepareStatement( selectSQL ); + try { + bindParameters( session, idSelect, binder.getEntity() ); + ResultSet rs = idSelect.executeQuery(); + try { + return getResult( session, rs, binder.getEntity() ); + } + finally { + rs.close(); + } + } + finally { + session.getBatcher().closeStatement( idSelect ); + } + + } + catch ( SQLException sqle ) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not retrieve generated id after insert: " + MessageHelper.infoString( persister ), + insertSQL + ); + } + } + + /** + * Get the SQL statement to be used to retrieve generated key values. + * + * @return The SQL command string + */ + protected abstract String getSelectSQL(); + + /** + * Bind any required parameter values into the SQL command {@link #getSelectSQL}. + * + * @param session The session + * @param ps The prepared {@link #getSelectSQL SQL} command + * @param entity The entity being saved. + * @throws SQLException + */ + protected void bindParameters( + SessionImplementor session, + PreparedStatement ps, + Object entity) throws SQLException { + } + + /** + * Extract the generated key value from the given result set. + * + * @param session The session + * @param rs The result set containing the generated primay key values. + * @param entity The entity being saved. 
+ * @return The generated identifier + * @throws SQLException + */ + protected abstract Serializable getResult( + SessionImplementor session, + ResultSet rs, + Object entity) throws SQLException; + +} diff --git a/src/org/hibernate/id/insert/Binder.java b/src/org/hibernate/id/insert/Binder.java new file mode 100644 index 0000000000..bc675cc3af --- /dev/null +++ b/src/org/hibernate/id/insert/Binder.java @@ -0,0 +1,12 @@ +package org.hibernate.id.insert; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +/** + * @author Steve Ebersole + */ +public interface Binder { + public void bindValues(PreparedStatement ps) throws SQLException; + public Object getEntity(); +} diff --git a/src/org/hibernate/id/insert/IdentifierGeneratingInsert.java b/src/org/hibernate/id/insert/IdentifierGeneratingInsert.java new file mode 100644 index 0000000000..571de2b36a --- /dev/null +++ b/src/org/hibernate/id/insert/IdentifierGeneratingInsert.java @@ -0,0 +1,17 @@ +package org.hibernate.id.insert; + +import org.hibernate.sql.Insert; +import org.hibernate.dialect.Dialect; + +/** + * Nothing more than a distinguishing subclass of Insert used to indicate + * intent. Some subclasses of this also provided some additional + * functionality or semantic to the genernated SQL statement string. + * + * @author Steve Ebersole + */ +public class IdentifierGeneratingInsert extends Insert { + public IdentifierGeneratingInsert(Dialect dialect) { + super( dialect ); + } +} diff --git a/src/org/hibernate/id/insert/InsertGeneratedIdentifierDelegate.java b/src/org/hibernate/id/insert/InsertGeneratedIdentifierDelegate.java new file mode 100644 index 0000000000..5ff50442be --- /dev/null +++ b/src/org/hibernate/id/insert/InsertGeneratedIdentifierDelegate.java @@ -0,0 +1,39 @@ +package org.hibernate.id.insert; + +import org.hibernate.engine.SessionImplementor; + +import java.io.Serializable; +import java.sql.PreparedStatement; +import java.sql.SQLException; + +/** + * Responsible for handling delegation relating to variants in how + * insert-generated-identifier generator strategies dictate processing:

+ * <ul>
 + * <li>building the sql insert statement</li>
 + * <li>determination of the generated identifier value</li>
 + * </ul>
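 + * <p/>
 + * The typical flow, as a sketch (<tt>delegate</tt>, <tt>session</tt> and
 + * <tt>binder</tt> assumed to be in scope):
 + * <pre>
 + * IdentifierGeneratingInsert insert = delegate.prepareIdentifierGeneratingInsert();
 + * // ... populate the insert's column/value pairs ...
 + * String sql = insert.toStatementString();
 + * Serializable id = delegate.performInsert( sql, session, binder );
 + * </pre>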
    + * + * @author Steve Ebersole + */ +public interface InsertGeneratedIdentifierDelegate { + + /** + * Build a {@link org.hibernate.sql.Insert} specific to the delegate's mode + * of handling generated key values. + * + * @return The insert object. + */ + public IdentifierGeneratingInsert prepareIdentifierGeneratingInsert(); + + /** + * Perform the indicated insert SQL statement and determine the identifier value + * generated. + * + * @param insertSQL + * @param session + * @param binder + * @return The generated identifier value. + */ + public Serializable performInsert(String insertSQL, SessionImplementor session, Binder binder); + +} diff --git a/src/org/hibernate/id/insert/InsertSelectIdentityInsert.java b/src/org/hibernate/id/insert/InsertSelectIdentityInsert.java new file mode 100644 index 0000000000..d0c2fb90fe --- /dev/null +++ b/src/org/hibernate/id/insert/InsertSelectIdentityInsert.java @@ -0,0 +1,20 @@ +package org.hibernate.id.insert; + +import org.hibernate.dialect.Dialect; + +/** + * Specialized IdentifierGeneratingInsert which appends the database + * specific clause which signifies to return generated IDENTITY values + * to the end of the insert statement. + * + * @author Steve Ebersole + */ +public class InsertSelectIdentityInsert extends IdentifierGeneratingInsert { + public InsertSelectIdentityInsert(Dialect dialect) { + super( dialect ); + } + + public String toStatementString() { + return getDialect().appendIdentitySelectToInsert( super.toStatementString() ); + } +} diff --git a/src/org/hibernate/id/package.html b/src/org/hibernate/id/package.html new file mode 100755 index 0000000000..196d5e47b3 --- /dev/null +++ b/src/org/hibernate/id/package.html @@ -0,0 +1,9 @@ + + + +

    + This package contains internal implementation classes for the + main API interfaces. +

    + + diff --git a/src/org/hibernate/impl/AbstractQueryImpl.java b/src/org/hibernate/impl/AbstractQueryImpl.java new file mode 100644 index 0000000000..fb068751f0 --- /dev/null +++ b/src/org/hibernate/impl/AbstractQueryImpl.java @@ -0,0 +1,878 @@ +//$Id$ +package org.hibernate.impl; + +import java.io.Serializable; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import org.hibernate.CacheMode; +import org.hibernate.FlushMode; +import org.hibernate.Hibernate; +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.NonUniqueResultException; +import org.hibernate.PropertyNotFoundException; +import org.hibernate.Query; +import org.hibernate.QueryException; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.RowSelection; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.TypedValue; +import org.hibernate.engine.query.ParameterMetadata; +import org.hibernate.hql.classic.ParserHelper; +import org.hibernate.property.Getter; +import org.hibernate.proxy.HibernateProxyHelper; +import org.hibernate.transform.ResultTransformer; +import org.hibernate.type.SerializableType; +import org.hibernate.type.Type; +import org.hibernate.type.TypeFactory; +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.MarkerObject; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +/** + * Abstract implementation of the Query interface. + * + * @author Gavin King, Max Andersen + */ +public abstract class AbstractQueryImpl implements Query { + + private static final Object UNSET_PARAMETER = new MarkerObject(""); + private static final Object UNSET_TYPE = new MarkerObject(""); + + private final String queryString; + protected final SessionImplementor session; + protected final ParameterMetadata parameterMetadata; + + // parameter bind values... + private List values = new ArrayList(4); + private List types = new ArrayList(4); + private Map namedParameters = new HashMap(4); + private Map namedParameterLists = new HashMap(4); + + private Object optionalObject; + private Serializable optionalId; + private String optionalEntityName; + + private RowSelection selection; + private boolean cacheable; + private String cacheRegion; + private String comment; + private FlushMode flushMode; + private CacheMode cacheMode; + private FlushMode sessionFlushMode; + private CacheMode sessionCacheMode; + private Serializable collectionKey; + private boolean readOnly; + private ResultTransformer resultTransformer; + + public AbstractQueryImpl( + String queryString, + FlushMode flushMode, + SessionImplementor session, + ParameterMetadata parameterMetadata) { + this.session = session; + this.queryString = queryString; + this.selection = new RowSelection(); + this.flushMode = flushMode; + this.cacheMode = null; + this.parameterMetadata = parameterMetadata; + } + + public String toString() { + return StringHelper.unqualify( getClass().getName() ) + '(' + queryString + ')'; + } + + public final String getQueryString() { + return queryString; + } + + //TODO: maybe call it getRowSelection() ? 
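 + // e.g. chaining query.setFirstResult(20).setMaxResults(10) leaves this
 + // RowSelection with firstRow=20 and maxRows=10 (see the setters below)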
+ public RowSelection getSelection() { + return selection; + } + + public Query setFlushMode(FlushMode flushMode) { + this.flushMode = flushMode; + return this; + } + + public Query setCacheMode(CacheMode cacheMode) { + this.cacheMode = cacheMode; + return this; + } + + public Query setCacheable(boolean cacheable) { + this.cacheable = cacheable; + return this; + } + + public Query setCacheRegion(String cacheRegion) { + if (cacheRegion != null) + this.cacheRegion = cacheRegion.trim(); + return this; + } + + public Query setComment(String comment) { + this.comment = comment; + return this; + } + + public Query setFirstResult(int firstResult) { + selection.setFirstRow( new Integer(firstResult) ); + return this; + } + + public Query setMaxResults(int maxResults) { + selection.setMaxRows( new Integer(maxResults) ); + return this; + } + + public Query setTimeout(int timeout) { + selection.setTimeout( new Integer(timeout) ); + return this; + } + public Query setFetchSize(int fetchSize) { + selection.setFetchSize( new Integer(fetchSize) ); + return this; + } + + public Type[] getReturnTypes() throws HibernateException { + return session.getFactory().getReturnTypes( queryString ); + } + + public String[] getReturnAliases() throws HibernateException { + return session.getFactory().getReturnAliases( queryString ); + } + + public Query setCollectionKey(Serializable collectionKey) { + this.collectionKey = collectionKey; + return this; + } + + public boolean isReadOnly() { + return readOnly; + } + + public Query setReadOnly(boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Query setResultTransformer(ResultTransformer transformer) { + this.resultTransformer = transformer; + return this; + } + + public void setOptionalEntityName(String optionalEntityName) { + this.optionalEntityName = optionalEntityName; + } + + public void setOptionalId(Serializable optionalId) { + this.optionalId = optionalId; + } + + public void setOptionalObject(Object optionalObject) { + this.optionalObject = optionalObject; + } + + SessionImplementor getSession() { + return session; + } + + protected abstract Map getLockModes(); + + + // Parameter handling code ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Returns a shallow copy of the named parameter value map. + * + * @return Shallow copy of the named parameter value map + */ + protected Map getNamedParams() { + return new HashMap( namedParameters ); + } + + /** + * Returns an array representing all named parameter names encountered + * during (intial) parsing of the query. + *

    + * Note initial here means different things depending on whether + * this is a native-sql query or an HQL/filter query. For native-sql, a + * precursory inspection of the query string is performed specifically to + * locate defined parameters. For HQL/filter queries, this is the + * information returned from the query-translator. This distinction + * holds true for all parameter metadata exposed here. + * + * @return Array of named parameter names. + * @throws HibernateException + */ + public String[] getNamedParameters() throws HibernateException { + return ArrayHelper.toStringArray( parameterMetadata.getNamedParameterNames() ); + } + + /** + * Does this query contain named parameters? + * + * @return True if the query was found to contain named parameters; false + * otherwise; + */ + public boolean hasNamedParameters() { + return parameterMetadata.getNamedParameterNames().size() > 0; + } + + /** + * Retreive the value map for any named parameter lists (i.e., for + * auto-expansion) bound to this query. + * + * @return The parameter list value map. + */ + protected Map getNamedParameterLists() { + return namedParameterLists; + } + + /** + * Retreives the list of parameter values bound to this query for + * ordinal parameters. + * + * @return The ordinal parameter values. + */ + protected List getValues() { + return values; + } + + /** + * Retreives the list of parameter {@link Type type}s bound to this query for + * ordinal parameters. + * + * @return The ordinal parameter types. + */ + protected List getTypes() { + return types; + } + + /** + * Perform parameter validation. Used prior to executing the encapsulated + * query. + * + * @throws QueryException + */ + protected void verifyParameters() throws QueryException { + verifyParameters(false); + } + + /** + * Perform parameter validation. Used prior to executing the encapsulated + * query. + * + * @param reserveFirstParameter if true, the first ? will not be verified since + * its needed for e.g. 
callable statements returning a out parameter + * @throws HibernateException + */ + protected void verifyParameters(boolean reserveFirstParameter) throws HibernateException { + if ( parameterMetadata.getNamedParameterNames().size() != namedParameters.size() + namedParameterLists.size() ) { + Set missingParams = new HashSet( parameterMetadata.getNamedParameterNames() ); + missingParams.removeAll( namedParameterLists.keySet() ); + missingParams.removeAll( namedParameters.keySet() ); + throw new QueryException( "Not all named parameters have been set: " + missingParams, getQueryString() ); + } + + int positionalValueSpan = 0; + for ( int i = 0; i < values.size(); i++ ) { + Object object = types.get( i ); + if( values.get( i ) == UNSET_PARAMETER || object == UNSET_TYPE ) { + if ( reserveFirstParameter && i==0 ) { + continue; + } + else { + throw new QueryException( "Unset positional parameter at position: " + i, getQueryString() ); + } + } + positionalValueSpan += ( (Type) object ).getColumnSpan( session.getFactory() ); + } + + if ( parameterMetadata.getOrdinalParameterCount() != positionalValueSpan ) { + if ( reserveFirstParameter && parameterMetadata.getOrdinalParameterCount() - 1 != positionalValueSpan ) { + throw new QueryException( + "Expected positional parameter count: " + + (parameterMetadata.getOrdinalParameterCount()-1) + + ", actual parameters: " + + values, + getQueryString() + ); + } + else if ( !reserveFirstParameter ) { + throw new QueryException( + "Expected positional parameter count: " + + parameterMetadata.getOrdinalParameterCount() + + ", actual parameters: " + + values, + getQueryString() + ); + } + } + } + + public Query setParameter(int position, Object val, Type type) { + if ( parameterMetadata.getOrdinalParameterCount() == 0 ) { + throw new IllegalArgumentException("No positional parameters in query: " + getQueryString() ); + } + if ( position < 0 || position > parameterMetadata.getOrdinalParameterCount() - 1 ) { + throw new IllegalArgumentException("Positional parameter does not exist: " + position + " in query: " + getQueryString() ); + } + int size = values.size(); + if ( position < size ) { + values.set( position, val ); + types.set( position, type ); + } + else { + // prepend value and type list with null for any positions before the wanted position. 
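 + // e.g. with two values already bound (size == 2), setParameter(3, val) adds
 + // one UNSET_PARAMETER/UNSET_TYPE marker at index 2, then appends val at 3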
+ for ( int i = 0; i < position - size; i++ ) { + values.add( UNSET_PARAMETER ); + types.add( UNSET_TYPE ); + } + values.add( val ); + types.add( type ); + } + return this; + } + + public Query setParameter(String name, Object val, Type type) { + if ( !parameterMetadata.getNamedParameterNames().contains( name ) ) { + throw new IllegalArgumentException("Parameter " + name + " does not exist as a named parameter in [" + getQueryString() + "]"); + } + else { + namedParameters.put( name, new TypedValue( type, val, session.getEntityMode() ) ); + return this; + } + } + + public Query setParameter(int position, Object val) throws HibernateException { + if (val == null) { + setParameter( position, val, Hibernate.SERIALIZABLE ); + } + else { + setParameter( position, val, determineType( position, val ) ); + } + return this; + } + + public Query setParameter(String name, Object val) throws HibernateException { + if (val == null) { + Type type = parameterMetadata.getNamedParameterExpectedType( name ); + if ( type == null ) { + type = Hibernate.SERIALIZABLE; + } + setParameter( name, val, type ); + } + else { + setParameter( name, val, determineType( name, val ) ); + } + return this; + } + + protected Type determineType(int paramPosition, Object paramValue, Type defaultType) { + Type type = parameterMetadata.getOrdinalParameterExpectedType( paramPosition + 1 ); + if ( type == null ) { + type = defaultType; + } + return type; + } + + protected Type determineType(int paramPosition, Object paramValue) throws HibernateException { + Type type = parameterMetadata.getOrdinalParameterExpectedType( paramPosition + 1 ); + if ( type == null ) { + type = guessType( paramValue ); + } + return type; + } + + protected Type determineType(String paramName, Object paramValue, Type defaultType) { + Type type = parameterMetadata.getNamedParameterExpectedType( paramName ); + if ( type == null ) { + type = defaultType; + } + return type; + } + + protected Type determineType(String paramName, Object paramValue) throws HibernateException { + Type type = parameterMetadata.getNamedParameterExpectedType( paramName ); + if ( type == null ) { + type = guessType( paramValue ); + } + return type; + } + + protected Type determineType(String paramName, Class clazz) throws HibernateException { + Type type = parameterMetadata.getNamedParameterExpectedType( paramName ); + if ( type == null ) { + type = guessType( clazz ); + } + return type; + } + + private Type guessType(Object param) throws HibernateException { + Class clazz = HibernateProxyHelper.getClassWithoutInitializingProxy( param ); + return guessType( clazz ); + } + + private Type guessType(Class clazz) throws HibernateException { + String typename = clazz.getName(); + Type type = TypeFactory.heuristicType(typename); + boolean serializable = type!=null && type instanceof SerializableType; + if (type==null || serializable) { + try { + session.getFactory().getEntityPersister( clazz.getName() ); + } + catch (MappingException me) { + if (serializable) { + return type; + } + else { + throw new HibernateException("Could not determine a type for class: " + typename); + } + } + return Hibernate.entity(clazz); + } + else { + return type; + } + } + + public Query setString(int position, String val) { + setParameter(position, val, Hibernate.STRING); + return this; + } + + public Query setCharacter(int position, char val) { + setParameter(position, new Character(val), Hibernate.CHARACTER); + return this; + } + + public Query setBoolean(int position, boolean val) { + Boolean valueToUse = 
val ? Boolean.TRUE : Boolean.FALSE; + Type typeToUse = determineType( position, valueToUse, Hibernate.BOOLEAN ); + setParameter( position, valueToUse, typeToUse ); + return this; + } + + public Query setByte(int position, byte val) { + setParameter(position, new Byte(val), Hibernate.BYTE); + return this; + } + + public Query setShort(int position, short val) { + setParameter(position, new Short(val), Hibernate.SHORT); + return this; + } + + public Query setInteger(int position, int val) { + setParameter(position, new Integer(val), Hibernate.INTEGER); + return this; + } + + public Query setLong(int position, long val) { + setParameter(position, new Long(val), Hibernate.LONG); + return this; + } + + public Query setFloat(int position, float val) { + setParameter(position, new Float(val), Hibernate.FLOAT); + return this; + } + + public Query setDouble(int position, double val) { + setParameter(position, new Double(val), Hibernate.DOUBLE); + return this; + } + + public Query setBinary(int position, byte[] val) { + setParameter(position, val, Hibernate.BINARY); + return this; + } + + public Query setText(int position, String val) { + setParameter(position, val, Hibernate.TEXT); + return this; + } + + public Query setSerializable(int position, Serializable val) { + setParameter(position, val, Hibernate.SERIALIZABLE); + return this; + } + + public Query setDate(int position, Date date) { + setParameter(position, date, Hibernate.DATE); + return this; + } + + public Query setTime(int position, Date date) { + setParameter(position, date, Hibernate.TIME); + return this; + } + + public Query setTimestamp(int position, Date date) { + setParameter(position, date, Hibernate.TIMESTAMP); + return this; + } + + public Query setEntity(int position, Object val) { + setParameter( position, val, Hibernate.entity( resolveEntityName( val ) ) ); + return this; + } + + private String resolveEntityName(Object val) { + if ( val == null ) { + throw new IllegalArgumentException( "entity for parameter binding cannot be null" ); + } + return session.bestGuessEntityName( val ); + } + + public Query setLocale(int position, Locale locale) { + setParameter(position, locale, Hibernate.LOCALE); + return this; + } + + public Query setCalendar(int position, Calendar calendar) { + setParameter(position, calendar, Hibernate.CALENDAR); + return this; + } + + public Query setCalendarDate(int position, Calendar calendar) { + setParameter(position, calendar, Hibernate.CALENDAR_DATE); + return this; + } + + public Query setBinary(String name, byte[] val) { + setParameter(name, val, Hibernate.BINARY); + return this; + } + + public Query setText(String name, String val) { + setParameter(name, val, Hibernate.TEXT); + return this; + } + + public Query setBoolean(String name, boolean val) { + Boolean valueToUse = val ? 
Boolean.TRUE : Boolean.FALSE; + Type typeToUse = determineType( name, valueToUse, Hibernate.BOOLEAN ); + setParameter( name, valueToUse, typeToUse ); + return this; + } + + public Query setByte(String name, byte val) { + setParameter(name, new Byte(val), Hibernate.BYTE); + return this; + } + + public Query setCharacter(String name, char val) { + setParameter(name, new Character(val), Hibernate.CHARACTER); + return this; + } + + public Query setDate(String name, Date date) { + setParameter(name, date, Hibernate.DATE); + return this; + } + + public Query setDouble(String name, double val) { + setParameter(name, new Double(val), Hibernate.DOUBLE); + return this; + } + + public Query setEntity(String name, Object val) { + setParameter( name, val, Hibernate.entity( resolveEntityName( val ) ) ); + return this; + } + + public Query setFloat(String name, float val) { + setParameter(name, new Float(val), Hibernate.FLOAT); + return this; + } + + public Query setInteger(String name, int val) { + setParameter(name, new Integer(val), Hibernate.INTEGER); + return this; + } + + public Query setLocale(String name, Locale locale) { + setParameter(name, locale, Hibernate.LOCALE); + return this; + } + + public Query setCalendar(String name, Calendar calendar) { + setParameter(name, calendar, Hibernate.CALENDAR); + return this; + } + + public Query setCalendarDate(String name, Calendar calendar) { + setParameter(name, calendar, Hibernate.CALENDAR_DATE); + return this; + } + + public Query setLong(String name, long val) { + setParameter(name, new Long(val), Hibernate.LONG); + return this; + } + + public Query setSerializable(String name, Serializable val) { + setParameter(name, val, Hibernate.SERIALIZABLE); + return this; + } + + public Query setShort(String name, short val) { + setParameter(name, new Short(val), Hibernate.SHORT); + return this; + } + + public Query setString(String name, String val) { + setParameter(name, val, Hibernate.STRING); + return this; + } + + public Query setTime(String name, Date date) { + setParameter(name, date, Hibernate.TIME); + return this; + } + + public Query setTimestamp(String name, Date date) { + setParameter(name, date, Hibernate.TIMESTAMP); + return this; + } + + public Query setBigDecimal(int position, BigDecimal number) { + setParameter(position, number, Hibernate.BIG_DECIMAL); + return this; + } + + public Query setBigDecimal(String name, BigDecimal number) { + setParameter(name, number, Hibernate.BIG_DECIMAL); + return this; + } + + public Query setBigInteger(int position, BigInteger number) { + setParameter(position, number, Hibernate.BIG_INTEGER); + return this; + } + + public Query setBigInteger(String name, BigInteger number) { + setParameter(name, number, Hibernate.BIG_INTEGER); + return this; + } + + public Query setParameterList(String name, Collection vals, Type type) throws HibernateException { + if ( !parameterMetadata.getNamedParameterNames().contains( name ) ) { + throw new IllegalArgumentException("Parameter " + name + " does not exist as a named parameter in [" + getQueryString() + "]"); + } + namedParameterLists.put( name, new TypedValue( type, vals, session.getEntityMode() ) ); + return this; + } + + /** + * Warning: adds new parameters to the argument by side-effect, as well as + * mutating the query string! 
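 + * <p/>
 + * For example, binding the named list <tt>names</tt> to two values rewrites
 + * <tt>where n.name in (:names)</tt> as
 + * <tt>where n.name in (:names0_, :names1_)</tt>, registering a typed value
 + * for each generated alias in <tt>namedParamsCopy</tt>.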
+ */ + protected String expandParameterLists(Map namedParamsCopy) { + String query = this.queryString; + Iterator iter = namedParameterLists.entrySet().iterator(); + while ( iter.hasNext() ) { + Map.Entry me = (Map.Entry) iter.next(); + query = expandParameterList( query, (String) me.getKey(), (TypedValue) me.getValue(), namedParamsCopy ); + } + return query; + } + + /** + * Warning: adds new parameters to the argument by side-effect, as well as + * mutating the query string! + */ + private String expandParameterList(String query, String name, TypedValue typedList, Map namedParamsCopy) { + Collection vals = (Collection) typedList.getValue(); + Type type = typedList.getType(); + if ( vals.size() == 1 ) { + // short-circuit for performance... + namedParamsCopy.put( name, new TypedValue( type, vals.iterator().next(), session.getEntityMode() ) ); + return query; + } + + StringBuffer list = new StringBuffer( 16 ); + Iterator iter = vals.iterator(); + int i = 0; + boolean isJpaPositionalParam = parameterMetadata.getNamedParameterDescriptor( name ).isJpaStyle(); + while ( iter.hasNext() ) { + String alias = ( isJpaPositionalParam ? 'x' + name : name ) + i++ + '_'; + namedParamsCopy.put( alias, new TypedValue( type, iter.next(), session.getEntityMode() ) ); + list.append( ParserHelper.HQL_VARIABLE_PREFIX ).append( alias ); + if ( iter.hasNext() ) { + list.append( ", " ); + } + } + String paramPrefix = isJpaPositionalParam ? "?" : ParserHelper.HQL_VARIABLE_PREFIX; + return StringHelper.replace( query, paramPrefix + name, list.toString(), true ); + } + + public Query setParameterList(String name, Collection vals) throws HibernateException { + if ( vals == null ) { + throw new QueryException( "Collection must be not null!" ); + } + + if( vals.size() == 0 ) { + setParameterList( name, vals, null ); + } + else { + setParameterList(name, vals, determineType( name, vals.iterator().next() ) ); + } + + return this; + } + + public Query setParameterList(String name, Object[] vals, Type type) throws HibernateException { + return setParameterList( name, Arrays.asList(vals), type ); + } + + public Query setParameterList(String name, Object[] vals) throws HibernateException { + return setParameterList( name, Arrays.asList(vals) ); + } + + public Query setProperties(Map map) throws HibernateException { + String[] params = getNamedParameters(); + for (int i = 0; i < params.length; i++) { + String namedParam = params[i]; + final Object object = map.get(namedParam); + if(object==null) { + continue; + } + Class retType = object.getClass(); + if ( Collection.class.isAssignableFrom( retType ) ) { + setParameterList( namedParam, ( Collection ) object ); + } + else if ( retType.isArray() ) { + setParameterList( namedParam, ( Object[] ) object ); + } + else { + setParameter( namedParam, object, determineType( namedParam, retType ) ); + } + + + } + return this; + } + + public Query setProperties(Object bean) throws HibernateException { + Class clazz = bean.getClass(); + String[] params = getNamedParameters(); + for (int i = 0; i < params.length; i++) { + String namedParam = params[i]; + try { + Getter getter = ReflectHelper.getGetter( clazz, namedParam ); + Class retType = getter.getReturnType(); + final Object object = getter.get( bean ); + if ( Collection.class.isAssignableFrom( retType ) ) { + setParameterList( namedParam, ( Collection ) object ); + } + else if ( retType.isArray() ) { + setParameterList( namedParam, ( Object[] ) object ); + } + else { + setParameter( namedParam, object, determineType( namedParam, 
retType ) ); + } + } + catch (PropertyNotFoundException pnfe) { + // ignore + } + } + return this; + } + + public Query setParameters(Object[] values, Type[] types) { + this.values = Arrays.asList(values); + this.types = Arrays.asList(types); + return this; + } + + + // Execution methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Object uniqueResult() throws HibernateException { + return uniqueElement( list() ); + } + + static Object uniqueElement(List list) throws NonUniqueResultException { + int size = list.size(); + if (size==0) return null; + Object first = list.get(0); + for ( int i=1; iScrollableResults interface + * + * @author Steve Ebersole + */ +public abstract class AbstractScrollableResults implements ScrollableResults { + + private static final Log log = LogFactory.getLog( AbstractScrollableResults.class ); + + private final ResultSet resultSet; + private final PreparedStatement ps; + private final SessionImplementor session; + private final Loader loader; + private final QueryParameters queryParameters; + private final Type[] types; + private HolderInstantiator holderInstantiator; + + public AbstractScrollableResults( + ResultSet rs, + PreparedStatement ps, + SessionImplementor sess, + Loader loader, + QueryParameters queryParameters, + Type[] types, + HolderInstantiator holderInstantiator) throws MappingException { + this.resultSet=rs; + this.ps=ps; + this.session = sess; + this.loader = loader; + this.queryParameters = queryParameters; + this.types = types; + this.holderInstantiator = holderInstantiator!=null && holderInstantiator.isRequired() + ? holderInstantiator + : null; + } + + protected abstract Object[] getCurrentRow(); + + protected ResultSet getResultSet() { + return resultSet; + } + + protected PreparedStatement getPs() { + return ps; + } + + protected SessionImplementor getSession() { + return session; + } + + protected Loader getLoader() { + return loader; + } + + protected QueryParameters getQueryParameters() { + return queryParameters; + } + + protected Type[] getTypes() { + return types; + } + + protected HolderInstantiator getHolderInstantiator() { + return holderInstantiator; + } + + public final void close() throws HibernateException { + try { + // not absolutely necessary, but does help with aggressive release + session.getBatcher().closeQueryStatement( ps, resultSet ); + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + sqle, + "could not close results" + ); + } + finally { + try { + session.getPersistenceContext().getLoadContexts().cleanup( resultSet ); + } + catch( Throwable ignore ) { + // ignore this error for now + log.trace( "exception trying to cleanup load context : " + ignore.getMessage() ); + } + } + } + + public final Object[] get() throws HibernateException { + return getCurrentRow(); + } + + public final Object get(int col) throws HibernateException { + return getCurrentRow()[col]; + } + + /** + * Check that the requested type is compatible with the result type, and + * return the column value. This version makes sure the the classes + * are identical. 
+ * + * @param col the column + * @param returnType a "final" type + */ + protected final Object getFinal(int col, Type returnType) throws HibernateException { + if ( holderInstantiator!=null ) { + throw new HibernateException("query specifies a holder class"); + } + + if ( returnType.getReturnedClass()==types[col].getReturnedClass() ) { + return get(col); + } + else { + return throwInvalidColumnTypeException(col, types[col], returnType); + } + } + + /** + * Check that the requested type is compatible with the result type, and + * return the column value. This version makes sure the the classes + * are "assignable". + * + * @param col the column + * @param returnType any type + */ + protected final Object getNonFinal(int col, Type returnType) throws HibernateException { + if ( holderInstantiator!=null ) { + throw new HibernateException("query specifies a holder class"); + } + + if ( returnType.getReturnedClass().isAssignableFrom( types[col].getReturnedClass() ) ) { + return get(col); + } + else { + return throwInvalidColumnTypeException(col, types[col], returnType); + } + } + + public final BigDecimal getBigDecimal(int col) throws HibernateException { + return (BigDecimal) getFinal(col, Hibernate.BIG_DECIMAL); + } + + public final BigInteger getBigInteger(int col) throws HibernateException { + return (BigInteger) getFinal(col, Hibernate.BIG_INTEGER); + } + + public final byte[] getBinary(int col) throws HibernateException { + return (byte[]) getFinal(col, Hibernate.BINARY); + } + + public final String getText(int col) throws HibernateException { + return (String) getFinal(col, Hibernate.TEXT); + } + + public final Blob getBlob(int col) throws HibernateException { + return (Blob) getNonFinal(col, Hibernate.BLOB); + } + + public final Clob getClob(int col) throws HibernateException { + return (Clob) getNonFinal(col, Hibernate.CLOB); + } + + public final Boolean getBoolean(int col) throws HibernateException { + return (Boolean) getFinal(col, Hibernate.BOOLEAN); + } + + public final Byte getByte(int col) throws HibernateException { + return (Byte) getFinal(col, Hibernate.BYTE); + } + + public final Character getCharacter(int col) throws HibernateException { + return (Character) getFinal(col, Hibernate.CHARACTER); + } + + public final Date getDate(int col) throws HibernateException { + return (Date) getNonFinal(col, Hibernate.TIMESTAMP); + } + + public final Calendar getCalendar(int col) throws HibernateException { + return (Calendar) getNonFinal(col, Hibernate.CALENDAR); + } + + public final Double getDouble(int col) throws HibernateException { + return (Double) getFinal(col, Hibernate.DOUBLE); + } + + public final Float getFloat(int col) throws HibernateException { + return (Float) getFinal(col, Hibernate.FLOAT); + } + + public final Integer getInteger(int col) throws HibernateException { + return (Integer) getFinal(col, Hibernate.INTEGER); + } + + public final Long getLong(int col) throws HibernateException { + return (Long) getFinal(col, Hibernate.LONG); + } + + public final Short getShort(int col) throws HibernateException { + return (Short) getFinal(col, Hibernate.SHORT); + } + + public final String getString(int col) throws HibernateException { + return (String) getFinal(col, Hibernate.STRING); + } + + public final Locale getLocale(int col) throws HibernateException { + return (Locale) getFinal(col, Hibernate.LOCALE); + } + + /*public final Currency getCurrency(int col) throws HibernateException { + return (Currency) get(col); + }*/ + + public final TimeZone getTimeZone(int col) throws 
HibernateException { + return (TimeZone) getNonFinal(col, Hibernate.TIMEZONE); + } + + public final Type getType(int i) { + return types[i]; + } + + private Object throwInvalidColumnTypeException( + int i, + Type type, + Type returnType) throws HibernateException { + throw new HibernateException( + "incompatible column types: " + + type.getName() + + ", " + + returnType.getName() + ); + } + + protected void afterScrollOperation() { + session.afterScrollOperation(); + } +} diff --git a/src/org/hibernate/impl/AbstractSessionImpl.java b/src/org/hibernate/impl/AbstractSessionImpl.java new file mode 100755 index 0000000000..a5119d53ef --- /dev/null +++ b/src/org/hibernate/impl/AbstractSessionImpl.java @@ -0,0 +1,150 @@ +//$Id$ +package org.hibernate.impl; + +import org.hibernate.MappingException; +import org.hibernate.Query; +import org.hibernate.SQLQuery; +import org.hibernate.HibernateException; +import org.hibernate.ScrollableResults; +import org.hibernate.SessionException; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.engine.NamedQueryDefinition; +import org.hibernate.engine.NamedSQLQueryDefinition; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.query.HQLQueryPlan; +import org.hibernate.engine.query.NativeSQLQueryPlan; + +import java.util.List; + +/** + * Functionality common to stateless and stateful sessions + * + * @author Gavin King + */ +public abstract class AbstractSessionImpl implements SessionImplementor { + + protected transient SessionFactoryImpl factory; + private boolean closed = false; + + protected AbstractSessionImpl(SessionFactoryImpl factory) { + this.factory = factory; + } + + public SessionFactoryImplementor getFactory() { + return factory; + } + + public boolean isClosed() { + return closed; + } + + protected void setClosed() { + closed = true; + } + + protected void errorIfClosed() { + if ( closed ) { + throw new SessionException( "Session is closed!" 
); + } + } + + public Query getNamedQuery(String queryName) throws MappingException { + errorIfClosed(); + NamedQueryDefinition nqd = factory.getNamedQuery( queryName ); + final Query query; + if ( nqd != null ) { + String queryString = nqd.getQueryString(); + query = new QueryImpl( + queryString, + nqd.getFlushMode(), + this, + getHQLQueryPlan( queryString, false ).getParameterMetadata() + ); + query.setComment( "named HQL query " + queryName ); + } + else { + NamedSQLQueryDefinition nsqlqd = factory.getNamedSQLQuery( queryName ); + if ( nsqlqd==null ) { + throw new MappingException( "Named query not known: " + queryName ); + } + query = new SQLQueryImpl( + nsqlqd, + this, + factory.getQueryPlanCache().getSQLParameterMetadata( nsqlqd.getQueryString() ) + ); + query.setComment( "named native SQL query " + queryName ); + nqd = nsqlqd; + } + initQuery( query, nqd ); + return query; + } + + public Query getNamedSQLQuery(String queryName) throws MappingException { + errorIfClosed(); + NamedSQLQueryDefinition nsqlqd = factory.getNamedSQLQuery( queryName ); + if ( nsqlqd==null ) { + throw new MappingException( "Named SQL query not known: " + queryName ); + } + Query query = new SQLQueryImpl( + nsqlqd, + this, + factory.getQueryPlanCache().getSQLParameterMetadata( nsqlqd.getQueryString() ) + ); + query.setComment( "named native SQL query " + queryName ); + initQuery( query, nsqlqd ); + return query; + } + + private void initQuery(Query query, NamedQueryDefinition nqd) { + query.setCacheable( nqd.isCacheable() ); + query.setCacheRegion( nqd.getCacheRegion() ); + if ( nqd.getTimeout()!=null ) query.setTimeout( nqd.getTimeout().intValue() ); + if ( nqd.getFetchSize()!=null ) query.setFetchSize( nqd.getFetchSize().intValue() ); + if ( nqd.getCacheMode() != null ) query.setCacheMode( nqd.getCacheMode() ); + query.setReadOnly( nqd.isReadOnly() ); + if ( nqd.getComment() != null ) query.setComment( nqd.getComment() ); + } + + public Query createQuery(String queryString) { + errorIfClosed(); + QueryImpl query = new QueryImpl( + queryString, + this, + getHQLQueryPlan( queryString, false ).getParameterMetadata() + ); + query.setComment( queryString ); + return query; + } + + public SQLQuery createSQLQuery(String sql) { + errorIfClosed(); + SQLQueryImpl query = new SQLQueryImpl( + sql, + this, + factory.getQueryPlanCache().getSQLParameterMetadata( sql ) + ); + query.setComment( "dynamic native SQL query" ); + return query; + } + + protected HQLQueryPlan getHQLQueryPlan(String query, boolean shallow) throws HibernateException { + return factory.getQueryPlanCache().getHQLQueryPlan( query, shallow, getEnabledFilters() ); + } + + protected NativeSQLQueryPlan getNativeSQLQueryPlan(NativeSQLQuerySpecification spec) throws HibernateException { + return factory.getQueryPlanCache().getNativeSQLQueryPlan( spec ); + } + + public List list(NativeSQLQuerySpecification spec, QueryParameters queryParameters) + throws HibernateException { + return listCustomQuery( getNativeSQLQueryPlan( spec ).getCustomQuery(), queryParameters ); + } + + public ScrollableResults scroll(NativeSQLQuerySpecification spec, QueryParameters queryParameters) + throws HibernateException { + return scrollCustomQuery( getNativeSQLQueryPlan( spec ).getCustomQuery(), queryParameters ); + } + +} diff --git a/src/org/hibernate/impl/CollectionFilterImpl.java b/src/org/hibernate/impl/CollectionFilterImpl.java new file mode 100644 index 0000000000..0a51771b5b --- /dev/null +++ b/src/org/hibernate/impl/CollectionFilterImpl.java @@ -0,0 +1,81 @@ +//$Id$ 
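+// Usage sketch (illustrative, not part of this class): collection filters are
+// created from a session, e.g.
+//   Query q = session.createFilter( customer.getOrders(), "where this.amount > :min" );
+//   q.setBigDecimal( "min", new BigDecimal( "100" ) );
+//   List expensive = q.list();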
+package org.hibernate.impl; + +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.hibernate.HibernateException; +import org.hibernate.ScrollableResults; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.query.ParameterMetadata; +import org.hibernate.type.Type; + +/** + * implementation of the Query interface for collection filters + * @author Gavin King + */ +public class CollectionFilterImpl extends QueryImpl { + + private Object collection; + + public CollectionFilterImpl( + String queryString, + Object collection, + SessionImplementor session, + ParameterMetadata parameterMetadata) { + super( queryString, session, parameterMetadata ); + this.collection = collection; + } + + + /** + * @see org.hibernate.Query#iterate() + */ + public Iterator iterate() throws HibernateException { + verifyParameters(); + Map namedParams = getNamedParams(); + return getSession().iterateFilter( + collection, + expandParameterLists(namedParams), + getQueryParameters(namedParams) + ); + } + + /** + * @see org.hibernate.Query#list() + */ + public List list() throws HibernateException { + verifyParameters(); + Map namedParams = getNamedParams(); + return getSession().listFilter( + collection, + expandParameterLists(namedParams), + getQueryParameters(namedParams) + ); + } + + /** + * @see org.hibernate.Query#scroll() + */ + public ScrollableResults scroll() throws HibernateException { + throw new UnsupportedOperationException("Can't scroll filters"); + } + + public Type[] typeArray() { + List typeList = getTypes(); + int size = typeList.size(); + Type[] result = new Type[size+1]; + for (int i=0; iCriteria interface + * @author Gavin King + */ +public class CriteriaImpl implements Criteria, Serializable { + + private final String entityOrClassName; + private transient SessionImplementor session; + private final String rootAlias; + + private List criterionEntries = new ArrayList(); + private List orderEntries = new ArrayList(); + private Projection projection; + private Criteria projectionCriteria; + + private List subcriteriaList = new ArrayList(); + + private Map fetchModes = new HashMap(); + private Map lockModes = new HashMap(); + + private Integer maxResults; + private Integer firstResult; + private Integer timeout; + private Integer fetchSize; + + private boolean cacheable; + private String cacheRegion; + private String comment; + + private FlushMode flushMode; + private CacheMode cacheMode; + private FlushMode sessionFlushMode; + private CacheMode sessionCacheMode; + + private ResultTransformer resultTransformer = Criteria.ROOT_ENTITY; + + + // Constructors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public CriteriaImpl(String entityOrClassName, SessionImplementor session) { + this(entityOrClassName, ROOT_ALIAS, session); + } + + public CriteriaImpl(String entityOrClassName, String alias, SessionImplementor session) { + this.session = session; + this.entityOrClassName = entityOrClassName; + this.cacheable = false; + this.rootAlias = alias; + } + + public String toString() { + return "CriteriaImpl(" + + entityOrClassName + ":" + + (rootAlias==null ? "" : rootAlias) + + subcriteriaList.toString() + + criterionEntries.toString() + + ( projection==null ? 
"" : projection.toString() ) + + ')'; + } + + + // State ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public SessionImplementor getSession() { + return session; + } + + public void setSession(SessionImplementor session) { + this.session = session; + } + + public String getEntityOrClassName() { + return entityOrClassName; + } + + public Map getLockModes() { + return lockModes; + } + + public Criteria getProjectionCriteria() { + return projectionCriteria; + } + + public Iterator iterateSubcriteria() { + return subcriteriaList.iterator(); + } + + public Iterator iterateExpressionEntries() { + return criterionEntries.iterator(); + } + + public Iterator iterateOrderings() { + return orderEntries.iterator(); + } + + public Criteria add(Criteria criteriaInst, Criterion expression) { + criterionEntries.add( new CriterionEntry(expression, criteriaInst) ); + return this; + } + + + // Criteria impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public String getAlias() { + return rootAlias; + } + + public Projection getProjection() { + return projection; + } + + public Criteria setProjection(Projection projection) { + this.projection = projection; + this.projectionCriteria = this; + setResultTransformer( PROJECTION ); + return this; + } + + public Criteria add(Criterion expression) { + add( this, expression ); + return this; + } + + public Criteria addOrder(Order ordering) { + orderEntries.add( new OrderEntry( ordering, this ) ); + return this; + } + + public FetchMode getFetchMode(String path) { + return (FetchMode) fetchModes.get(path); + } + + public Criteria setFetchMode(String associationPath, FetchMode mode) { + fetchModes.put( associationPath, mode ); + return this; + } + + public Criteria setLockMode(LockMode lockMode) { + return setLockMode( getAlias(), lockMode ); + } + + public Criteria setLockMode(String alias, LockMode lockMode) { + lockModes.put( alias, lockMode ); + return this; + } + + public Criteria createAlias(String associationPath, String alias) { + return createAlias( associationPath, alias, INNER_JOIN ); + } + + public Criteria createAlias(String associationPath, String alias, int joinType) { + new Subcriteria( this, associationPath, alias, joinType ); + return this; + } + + public Criteria createCriteria(String associationPath) { + return createCriteria( associationPath, INNER_JOIN ); + } + + public Criteria createCriteria(String associationPath, int joinType) { + return new Subcriteria( this, associationPath, joinType ); + } + + public Criteria createCriteria(String associationPath, String alias) { + return createCriteria( associationPath, alias, INNER_JOIN ); + } + + public Criteria createCriteria(String associationPath, String alias, int joinType) { + return new Subcriteria( this, associationPath, alias, joinType ); + } + + public ResultTransformer getResultTransformer() { + return resultTransformer; + } + + public Criteria setResultTransformer(ResultTransformer tupleMapper) { + this.resultTransformer = tupleMapper; + return this; + } + + public Integer getMaxResults() { + return maxResults; + } + + public Criteria setMaxResults(int maxResults) { + this.maxResults = new Integer(maxResults); + return this; + } + + public Integer getFirstResult() { + return firstResult; + } + + public Criteria setFirstResult(int firstResult) { + this.firstResult = new Integer(firstResult); + return this; + } + + public Integer getFetchSize() { + return fetchSize; + } + + public Criteria setFetchSize(int fetchSize) { + this.fetchSize = new 
Integer(fetchSize); + return this; + } + + public Integer getTimeout() { + return timeout; + } + + public Criteria setTimeout(int timeout) { + this.timeout = new Integer(timeout); + return this; + } + + public boolean getCacheable() { + return this.cacheable; + } + + public Criteria setCacheable(boolean cacheable) { + this.cacheable = cacheable; + return this; + } + + public String getCacheRegion() { + return this.cacheRegion; + } + + public Criteria setCacheRegion(String cacheRegion) { + this.cacheRegion = cacheRegion.trim(); + return this; + } + + public String getComment() { + return comment; + } + + public Criteria setComment(String comment) { + this.comment = comment; + return this; + } + + public Criteria setFlushMode(FlushMode flushMode) { + this.flushMode = flushMode; + return this; + } + + public Criteria setCacheMode(CacheMode cacheMode) { + this.cacheMode = cacheMode; + return this; + } + + public List list() throws HibernateException { + before(); + try { + return session.list( this ); + } + finally { + after(); + } + } + + public ScrollableResults scroll() { + return scroll( ScrollMode.SCROLL_INSENSITIVE ); + } + + public ScrollableResults scroll(ScrollMode scrollMode) { + before(); + try { + return session.scroll(this, scrollMode); + } + finally { + after(); + } + } + + public Object uniqueResult() throws HibernateException { + return AbstractQueryImpl.uniqueElement( list() ); + } + + protected void before() { + if ( flushMode != null ) { + sessionFlushMode = getSession().getFlushMode(); + getSession().setFlushMode( flushMode ); + } + if ( cacheMode != null ) { + sessionCacheMode = getSession().getCacheMode(); + getSession().setCacheMode( cacheMode ); + } + } + + protected void after() { + if ( sessionFlushMode != null ) { + getSession().setFlushMode( sessionFlushMode ); + sessionFlushMode = null; + } + if ( sessionCacheMode != null ) { + getSession().setCacheMode( sessionCacheMode ); + sessionCacheMode = null; + } + } + + public boolean isLookupByNaturalKey() { + if ( projection != null ) { + return false; + } + if ( subcriteriaList.size() > 0 ) { + return false; + } + if ( criterionEntries.size() != 1 ) { + return false; + } + CriterionEntry ce = (CriterionEntry) criterionEntries.get(0); + return ce.getCriterion() instanceof NaturalIdentifier; + } + + + // Inner classes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public final class Subcriteria implements Criteria, Serializable { + + private String alias; + private String path; + private Criteria parent; + private LockMode lockMode; + private int joinType; + + + // Constructors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + private Subcriteria(Criteria parent, String path, String alias, int joinType) { + this.alias = alias; + this.path = path; + this.parent = parent; + this.joinType = joinType; + CriteriaImpl.this.subcriteriaList.add(this); + } + + private Subcriteria(Criteria parent, String path, int joinType) { + this( parent, path, null, joinType ); + } + + public String toString() { + return "Subcriteria(" + + path + ":" + + (alias==null ? 
"" : alias) + + ')'; + } + + + // State ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public String getPath() { + return path; + } + + public Criteria getParent() { + return parent; + } + + public LockMode getLockMode() { + return lockMode; + } + + public Criteria setLockMode(LockMode lockMode) { + this.lockMode = lockMode; + return this; + } + + public int getJoinType() { + return joinType; + } + + + // Criteria impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Criteria add(Criterion expression) { + CriteriaImpl.this.add(this, expression); + return this; + } + + public Criteria addOrder(Order order) { + CriteriaImpl.this.orderEntries.add( new OrderEntry(order, this) ); + return this; + } + + public Criteria createAlias(String associationPath, String alias) { + return createAlias( associationPath, alias, INNER_JOIN ); + } + + public Criteria createAlias(String associationPath, String alias, int joinType) throws HibernateException { + new Subcriteria( this, associationPath, alias, joinType ); + return this; + } + + public Criteria createCriteria(String associationPath) { + return createCriteria( associationPath, INNER_JOIN ); + } + + public Criteria createCriteria(String associationPath, int joinType) throws HibernateException { + return new Subcriteria( Subcriteria.this, associationPath, joinType ); + } + + public Criteria createCriteria(String associationPath, String alias) { + return createCriteria( associationPath, alias, INNER_JOIN ); + } + + public Criteria createCriteria(String associationPath, String alias, int joinType) throws HibernateException { + return new Subcriteria( Subcriteria.this, associationPath, alias, joinType ); + } + + public Criteria setCacheable(boolean cacheable) { + CriteriaImpl.this.setCacheable(cacheable); + return this; + } + + public Criteria setCacheRegion(String cacheRegion) { + CriteriaImpl.this.setCacheRegion(cacheRegion); + return this; + } + + public List list() throws HibernateException { + return CriteriaImpl.this.list(); + } + + public ScrollableResults scroll() throws HibernateException { + return CriteriaImpl.this.scroll(); + } + + public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException { + return CriteriaImpl.this.scroll(scrollMode); + } + + public Object uniqueResult() throws HibernateException { + return CriteriaImpl.this.uniqueResult(); + } + + public Criteria setFetchMode(String associationPath, FetchMode mode) + throws HibernateException { + CriteriaImpl.this.setFetchMode( StringHelper.qualify(path, associationPath), mode); + return this; + } + + public Criteria setFlushMode(FlushMode flushMode) { + CriteriaImpl.this.setFlushMode(flushMode); + return this; + } + + public Criteria setCacheMode(CacheMode cacheMode) { + CriteriaImpl.this.setCacheMode(cacheMode); + return this; + } + + public Criteria setFirstResult(int firstResult) { + CriteriaImpl.this.setFirstResult(firstResult); + return this; + } + + public Criteria setMaxResults(int maxResults) { + CriteriaImpl.this.setMaxResults(maxResults); + return this; + } + + public Criteria setTimeout(int timeout) { + CriteriaImpl.this.setTimeout(timeout); + return this; + } + + public Criteria setFetchSize(int fetchSize) { + CriteriaImpl.this.setFetchSize(fetchSize); + return this; + } + + public Criteria setLockMode(String alias, LockMode lockMode) { + CriteriaImpl.this.setLockMode(alias, lockMode); + return this; + 
} + + public Criteria setResultTransformer(ResultTransformer resultProcessor) { + CriteriaImpl.this.setResultTransformer(resultProcessor); + return this; + } + + public Criteria setComment(String comment) { + CriteriaImpl.this.setComment(comment); + return this; + } + + public Criteria setProjection(Projection projection) { + CriteriaImpl.this.projection = projection; + CriteriaImpl.this.projectionCriteria = this; + setResultTransformer(PROJECTION); + return this; + } + } + + public static final class CriterionEntry implements Serializable { + private final Criterion criterion; + private final Criteria criteria; + + private CriterionEntry(Criterion criterion, Criteria criteria) { + this.criteria = criteria; + this.criterion = criterion; + } + + public Criterion getCriterion() { + return criterion; + } + + public Criteria getCriteria() { + return criteria; + } + + public String toString() { + return criterion.toString(); + } + } + + public static final class OrderEntry implements Serializable { + private final Order order; + private final Criteria criteria; + + private OrderEntry(Order order, Criteria criteria) { + this.criteria = criteria; + this.order = order; + } + + public Order getOrder() { + return order; + } + + public Criteria getCriteria() { + return criteria; + } + + public String toString() { + return order.toString(); + } + } +} diff --git a/src/org/hibernate/impl/FetchingScrollableResultsImpl.java b/src/org/hibernate/impl/FetchingScrollableResultsImpl.java new file mode 100644 index 0000000000..b2230c1a2c --- /dev/null +++ b/src/org/hibernate/impl/FetchingScrollableResultsImpl.java @@ -0,0 +1,293 @@ +// $Id$ +package org.hibernate.impl; + +import org.hibernate.HibernateException; +import org.hibernate.MappingException; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.hql.HolderInstantiator; +import org.hibernate.type.Type; +import org.hibernate.loader.Loader; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.QueryParameters; + +import java.sql.ResultSet; +import java.sql.PreparedStatement; +import java.sql.SQLException; + +/** + * Implementation of ScrollableResults which can handle collection fetches. 
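+ *
+ * When a collection is fetched along with its owner, one logical result row
+ * may span several JDBC rows, so this implementation tracks its own position
+ * and reads rows sequentially through the Loader instead of delegating
+ * positioning to the underlying ResultSet. A sketch of a query producing
+ * such results (entity and collection names hypothetical):
+ * <pre>
+ * ScrollableResults results = session
+ *     .createQuery( "from Order o join fetch o.lineItems" )
+ *     .scroll();
+ * while ( results.next() ) {
+ *     Order order = (Order) results.get( 0 );
+ * }
+ * </pre>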
+ * + * @author Steve Ebersole + */ +public class FetchingScrollableResultsImpl extends AbstractScrollableResults { + + public FetchingScrollableResultsImpl( + ResultSet rs, + PreparedStatement ps, + SessionImplementor sess, + Loader loader, + QueryParameters queryParameters, + Type[] types, + HolderInstantiator holderInstantiator) throws MappingException { + super( rs, ps, sess, loader, queryParameters, types, holderInstantiator ); + } + + private Object[] currentRow = null; + private int currentPosition = 0; + private Integer maxPosition = null; + + protected Object[] getCurrentRow() { + return currentRow; + } + + /** + * Advance to the next result + * + * @return true if there is another result + */ + public boolean next() throws HibernateException { + if ( maxPosition != null && maxPosition.intValue() <= currentPosition ) { + currentRow = null; + currentPosition = maxPosition.intValue() + 1; + return false; + } + + Object row = getLoader().loadSequentialRowsForward( + getResultSet(), + getSession(), + getQueryParameters(), + false + ); + + + boolean afterLast; + try { + afterLast = getResultSet().isAfterLast(); + } + catch( SQLException e ) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + e, + "exception calling isAfterLast()" + ); + } + + currentPosition++; + currentRow = new Object[] { row }; + + if ( afterLast ) { + if ( maxPosition == null ) { + // we just hit the last position + maxPosition = new Integer( currentPosition ); + } + } + + afterScrollOperation(); + + return true; + } + + /** + * Retreat to the previous result + * + * @return true if there is a previous result + */ + public boolean previous() throws HibernateException { + if ( currentPosition <= 1 ) { + currentPosition = 0; + currentRow = null; + return false; + } + + Object loadResult = getLoader().loadSequentialRowsReverse( + getResultSet(), + getSession(), + getQueryParameters(), + false, + ( maxPosition != null && currentPosition > maxPosition.intValue() ) + ); + + currentRow = new Object[] { loadResult }; + currentPosition--; + + afterScrollOperation(); + + return true; + + } + + /** + * Scroll an arbitrary number of locations + * + * @param positions a positive (forward) or negative (backward) number of rows + * + * @return true if there is a result at the new location + */ + public boolean scroll(int positions) throws HibernateException { + boolean more = false; + if ( positions > 0 ) { + // scroll ahead + for ( int i = 0; i < positions; i++ ) { + more = next(); + if ( !more ) { + break; + } + } + } + else if ( positions < 0 ) { + // scroll backward + for ( int i = 0; i < ( 0 - positions ); i++ ) { + more = previous(); + if ( !more ) { + break; + } + } + } + else { + throw new HibernateException( "scroll(0) not valid" ); + } + + afterScrollOperation(); + + return more; + } + + /** + * Go to the last result + * + * @return true if there are any results + */ + public boolean last() throws HibernateException { + boolean more = false; + if ( maxPosition != null ) { + for ( int i = currentPosition; i < maxPosition.intValue(); i++ ) { + more = next(); + } + } + else { + try { + if ( getResultSet().isAfterLast() ) { + // should not be able to reach last without maxPosition being set + // unless there are no results + return false; + } + + while ( !getResultSet().isAfterLast() ) { + more = next(); + } + } + catch( SQLException e ) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + e, + "exception calling isAfterLast()" + ); + } 
+ } + + afterScrollOperation(); + + return more; + } + + /** + * Go to the first result + * + * @return true if there are any results + */ + public boolean first() throws HibernateException { + beforeFirst(); + boolean more = next(); + + afterScrollOperation(); + + return more; + } + + /** + * Go to a location just before first result (this is the initial location) + */ + public void beforeFirst() throws HibernateException { + try { + getResultSet().beforeFirst(); + } + catch( SQLException e ) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + e, + "exception calling beforeFirst()" + ); + } + currentRow = null; + currentPosition = 0; + } + + /** + * Go to a location just after the last result + */ + public void afterLast() throws HibernateException { + // TODO : not sure the best way to handle this. + // The non-performant way : + last(); + next(); + afterScrollOperation(); + } + + /** + * Is this the first result? + * + * @return true if this is the first row of results + * + * @throws org.hibernate.HibernateException + */ + public boolean isFirst() throws HibernateException { + return currentPosition == 1; + } + + /** + * Is this the last result? + * + * @return true if this is the last row of results + * + * @throws org.hibernate.HibernateException + */ + public boolean isLast() throws HibernateException { + if ( maxPosition == null ) { + // we have not yet hit the last result... + return false; + } + else { + return currentPosition == maxPosition.intValue(); + } + } + + /** + * Get the current location in the result set. The first row is number 0, contrary to JDBC. + * + * @return the row number, numbered from 0, or -1 if there is no current row + */ + public int getRowNumber() throws HibernateException { + return currentPosition; + } + + /** + * Set the current location in the result set, numbered from either the first row (row number 0), or the last + * row (row number -1). + * + * @param rowNumber the row number, numbered from the last row, in the case of a negative row number + * + * @return true if there is a row at that row number + */ + public boolean setRowNumber(int rowNumber) throws HibernateException { + if ( rowNumber == 1 ) { + return first(); + } + else if ( rowNumber == -1 ) { + return last(); + } + else if ( maxPosition != null && rowNumber == maxPosition.intValue() ) { + return last(); + } + return scroll( rowNumber - currentPosition ); + } +} diff --git a/src/org/hibernate/impl/FilterImpl.java b/src/org/hibernate/impl/FilterImpl.java new file mode 100644 index 0000000000..86f0363e99 --- /dev/null +++ b/src/org/hibernate/impl/FilterImpl.java @@ -0,0 +1,151 @@ +// $Id$ +package org.hibernate.impl; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import org.hibernate.Filter; +import org.hibernate.HibernateException; +import org.hibernate.engine.FilterDefinition; +import org.hibernate.type.Type; + +/** + * Implementation of FilterImpl. FilterImpl implements the user's + * view into enabled dynamic filters, allowing them to set filter parameter values. 
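+ *
+ * A filter defined in the mappings is enabled per session and parameterised
+ * through this class; a minimal sketch (filter and parameter names
+ * hypothetical):
+ * <pre>
+ * session.enableFilter( "effectiveDate" )
+ *     .setParameter( "asOfDate", new Date() );
+ * </pre>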
+ * + * @author Steve Ebersole + */ +public class FilterImpl implements Filter, Serializable { + public static final String MARKER = "$FILTER_PLACEHOLDER$"; + + private transient FilterDefinition definition; + private String filterName; + private Map parameters = new HashMap(); + + void afterDeserialize(SessionFactoryImpl factory) { + definition = factory.getFilterDefinition(filterName); + } + + /** + * Constructs a new FilterImpl. + * + * @param configuration The filter's global configuration. + */ + public FilterImpl(FilterDefinition configuration) { + this.definition = configuration; + filterName = definition.getFilterName(); + } + + public FilterDefinition getFilterDefinition() { + return definition; + } + + /** + * Get the name of this filter. + * + * @return This filter's name. + */ + public String getName() { + return definition.getFilterName(); + } + + public Map getParameters() { + return parameters; + } + + /** + * Set the named parameter's value for this filter. + * + * @param name The parameter's name. + * @param value The value to be applied. + * @return This FilterImpl instance (for method chaining). + * @throws IllegalArgumentException Indicates that either the parameter was undefined or that the type + * of the passed value did not match the configured type. + */ + public Filter setParameter(String name, Object value) throws IllegalArgumentException { + // Make sure this is a defined parameter and check the incoming value type + // TODO: what should be the actual exception type here? + Type type = definition.getParameterType( name ); + if ( type == null ) { + throw new IllegalArgumentException( "Undefined filter parameter [" + name + "]" ); + } + if ( value != null && !type.getReturnedClass().isAssignableFrom( value.getClass() ) ) { + throw new IllegalArgumentException( "Incorrect type for parameter [" + name + "]" ); + } + parameters.put( name, value ); + return this; + } + + /** + * Set the named parameter's value list for this filter. Used + * in conjunction with IN-style filter criteria. + * + * @param name The parameter's name. + * @param values The values to be expanded into an SQL IN list. + * @return This FilterImpl instance (for method chaining). + */ + public Filter setParameterList(String name, Collection values) throws HibernateException { + // Make sure this is a defined parameter and check the incoming value type + if ( values == null ) { + throw new IllegalArgumentException( "Collection must be not null!" ); + } + Type type = definition.getParameterType( name ); + if ( type == null ) { + throw new HibernateException( "Undefined filter parameter [" + name + "]" ); + } + if ( values.size() > 0 ) { + Class elementClass = values.iterator().next().getClass(); + if ( !type.getReturnedClass().isAssignableFrom( elementClass ) ) { + throw new HibernateException( "Incorrect type for parameter [" + name + "]" ); + } + } + parameters.put( name, values ); + return this; + } + + /** + * Set the named parameter's value list for this filter. Used + * in conjunction with IN-style filter criteria. + * + * @param name The parameter's name. + * @param values The values to be expanded into an SQL IN list. + * @return This FilterImpl instance (for method chaining). + */ + public Filter setParameterList(String name, Object[] values) throws IllegalArgumentException { + return setParameterList( name, Arrays.asList( values ) ); + } + + /** + * Get the value of the named parameter for the current filter. + * + * @param name The name of the parameter for which to return the value. 
+ * @return The value of the named parameter. + */ + public Object getParameter(String name) { + return parameters.get( name ); + } + + /** + * Perform validation of the filter state. This is used to verify the + * state of the filter after its enablement and before its use. + * + * @throws HibernateException If the state is not currently valid. + */ + public void validate() throws HibernateException { + // for each of the defined parameters, make sure its value + // has been set + Iterator itr = definition.getParameterNames().iterator(); + while ( itr.hasNext() ) { + final String parameterName = (String) itr.next(); + if ( parameters.get( parameterName ) == null ) { + throw new HibernateException( + "Filter [" + getName() + "] parameter [" + parameterName + "] value not set" + ); + } + } + } +} diff --git a/src/org/hibernate/impl/IteratorImpl.java b/src/org/hibernate/impl/IteratorImpl.java new file mode 100644 index 0000000000..ae4caa6d2d --- /dev/null +++ b/src/org/hibernate/impl/IteratorImpl.java @@ -0,0 +1,161 @@ +//$Id$ +package org.hibernate.impl; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.NoSuchElementException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.HibernateException; +import org.hibernate.JDBCException; +import org.hibernate.engine.HibernateIterator; +import org.hibernate.event.EventSource; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.hql.HolderInstantiator; +import org.hibernate.type.EntityType; +import org.hibernate.type.Type; + +/** + * An implementation of java.util.Iterator that is + * returned by iterate() query execution methods. + * @author Gavin King + */ +public final class IteratorImpl implements HibernateIterator { + + private static final Log log = LogFactory.getLog(IteratorImpl.class); + + private ResultSet rs; + private final EventSource session; + private final Type[] types; + private final boolean single; + private Object currentResult; + private boolean hasNext; + private final String[][] names; + private PreparedStatement ps; + private Object nextResult; + private HolderInstantiator holderInstantiator; + + public IteratorImpl( + ResultSet rs, + PreparedStatement ps, + EventSource sess, + Type[] types, + String[][] columnNames, + HolderInstantiator holderInstantiator) + throws HibernateException, SQLException { + + this.rs=rs; + this.ps=ps; + this.session = sess; + this.types = types; + this.names = columnNames; + this.holderInstantiator = holderInstantiator; + + single = types.length==1; + + postNext(); + } + + public void close() throws JDBCException { + if (ps!=null) { + try { + log.debug("closing iterator"); + nextResult = null; + session.getBatcher().closeQueryStatement(ps, rs); + ps = null; + rs = null; + hasNext = false; + } + catch (SQLException e) { + log.info( "Unable to close iterator", e ); + throw JDBCExceptionHelper.convert( + session.getFactory().getSQLExceptionConverter(), + e, + "Unable to close iterator" + ); + } + finally { + try { + session.getPersistenceContext().getLoadContexts().cleanup( rs ); + } + catch( Throwable ignore ) { + // ignore this error for now + log.trace( "exception trying to cleanup load context : " + ignore.getMessage() ); + } + } + } + } + + private void postNext() throws HibernateException, SQLException { + this.hasNext = rs.next(); + if (!hasNext) { + log.debug("exhausted results"); + close(); + } + else { + log.debug("retrieving next results"); + 
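+			// the next row is materialized ahead of time; a HolderInstantiator
+			// wraps the row in a holder object when the query uses
+			// "select new ..." or a result transformer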
boolean isHolder = holderInstantiator.isRequired(); + + if ( single && !isHolder ) { + nextResult = types[0].nullSafeGet( rs, names[0], session, null ); + } + else { + Object[] nextResults = new Object[types.length]; + for (int i=0; iQuery interface, + * for "ordinary" HQL queries (not collection filters) + * @see CollectionFilterImpl + * @author Gavin King + */ +public class QueryImpl extends AbstractQueryImpl { + + private Map lockModes = new HashMap(2); + + public QueryImpl( + String queryString, + FlushMode flushMode, + SessionImplementor session, + ParameterMetadata parameterMetadata) { + super( queryString, flushMode, session, parameterMetadata ); + } + + public QueryImpl(String queryString, SessionImplementor session, ParameterMetadata parameterMetadata) { + this( queryString, null, session, parameterMetadata ); + } + + public Iterator iterate() throws HibernateException { + verifyParameters(); + Map namedParams = getNamedParams(); + before(); + try { + return getSession().iterate( + expandParameterLists(namedParams), + getQueryParameters(namedParams) + ); + } + finally { + after(); + } + } + + public ScrollableResults scroll() throws HibernateException { + return scroll( ScrollMode.SCROLL_INSENSITIVE ); + } + + public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException { + verifyParameters(); + Map namedParams = getNamedParams(); + before(); + QueryParameters qp = getQueryParameters(namedParams); + qp.setScrollMode(scrollMode); + try { + return getSession().scroll( expandParameterLists(namedParams), qp ); + } + finally { + after(); + } + } + + public List list() throws HibernateException { + verifyParameters(); + Map namedParams = getNamedParams(); + before(); + try { + return getSession().list( + expandParameterLists(namedParams), + getQueryParameters(namedParams) + ); + } + finally { + after(); + } + } + + public int executeUpdate() throws HibernateException { + verifyParameters(); + Map namedParams = getNamedParams(); + before(); + try { + return getSession().executeUpdate( + expandParameterLists( namedParams ), + getQueryParameters( namedParams ) + ); + } + finally { + after(); + } + } + + public Query setLockMode(String alias, LockMode lockMode) { + lockModes.put(alias, lockMode); + return this; + } + + protected Map getLockModes() { + return lockModes; + } + +} + + + + + + diff --git a/src/org/hibernate/impl/SQLQueryImpl.java b/src/org/hibernate/impl/SQLQueryImpl.java new file mode 100644 index 0000000000..2e7148e24f --- /dev/null +++ b/src/org/hibernate/impl/SQLQueryImpl.java @@ -0,0 +1,344 @@ +//$Id$ +package org.hibernate.impl; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.io.Serializable; + +import org.hibernate.FlushMode; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.Query; +import org.hibernate.QueryException; +import org.hibernate.SQLQuery; +import org.hibernate.ScrollMode; +import org.hibernate.ScrollableResults; +import org.hibernate.MappingException; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.engine.ResultSetMappingDefinition; +import org.hibernate.engine.NamedSQLQueryDefinition; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.query.ParameterMetadata; +import org.hibernate.engine.query.sql.NativeSQLQueryJoinReturn; +import 
org.hibernate.engine.query.sql.NativeSQLQueryScalarReturn; +import org.hibernate.engine.query.sql.NativeSQLQueryRootReturn; +import org.hibernate.engine.query.sql.NativeSQLQueryReturn; +import org.hibernate.type.Type; +import org.hibernate.util.CollectionHelper; +import org.hibernate.util.StringHelper; + +/** + * Implements SQL query passthrough. + * + *

+ * <pre>
+ * <sql-query name="mySqlQuery">
+ *   <return alias="person" class="eg.Person"/>
+ *   SELECT {person}.NAME AS {person.name}, {person}.AGE AS {person.age}, {person}.SEX AS {person.sex}
+ *   FROM PERSON {person} WHERE {person}.NAME LIKE 'Hiber%'
+ * </sql-query>
+ * </pre>
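+ *
+ * The same query can also be built programmatically; a minimal sketch (the
+ * <tt>Person</tt> entity is hypothetical):
+ * <pre>
+ * List people = session.createSQLQuery(
+ *         "SELECT {p.*} FROM PERSON {p} WHERE {p}.NAME LIKE 'Hiber%'" )
+ *     .addEntity( "p", Person.class )
+ *     .list();
+ * </pre>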
    + * + * @author Max Andersen + */ +public class SQLQueryImpl extends AbstractQueryImpl implements SQLQuery { + + private final List queryReturns; + private Collection querySpaces; + private final boolean callable; + private boolean autodiscovertypes; + + /** + * Constructs a SQLQueryImpl given a sql query defined in the mappings. + * + * @param queryDef The representation of the defined . + * @param session The session to which this SQLQueryImpl belongs. + * @param parameterMetadata Metadata about parameters found in the query. + */ + SQLQueryImpl(NamedSQLQueryDefinition queryDef, SessionImplementor session, ParameterMetadata parameterMetadata) { + super( queryDef.getQueryString(), queryDef.getFlushMode(), session, parameterMetadata ); + if ( queryDef.getResultSetRef() != null ) { + ResultSetMappingDefinition definition = session.getFactory() + .getResultSetMapping( queryDef.getResultSetRef() ); + if (definition == null) { + throw new MappingException( + "Unable to find resultset-ref definition: " + + queryDef.getResultSetRef() + ); + } + this.queryReturns = Arrays.asList( definition.getQueryReturns() ); + } + else { + this.queryReturns = Arrays.asList( queryDef.getQueryReturns() ); + } + + this.querySpaces = queryDef.getQuerySpaces(); + this.callable = queryDef.isCallable(); + } + + SQLQueryImpl( + final String sql, + final List queryReturns, + final Collection querySpaces, + final FlushMode flushMode, + boolean callable, + final SessionImplementor session, + ParameterMetadata parameterMetadata) { + // TODO : absolutely no usages of this constructor form; can it go away? + super( sql, flushMode, session, parameterMetadata ); + this.queryReturns = queryReturns; + this.querySpaces = querySpaces; + this.callable = callable; + } + + SQLQueryImpl( + final String sql, + final String returnAliases[], + final Class returnClasses[], + final LockMode[] lockModes, + final SessionImplementor session, + final Collection querySpaces, + final FlushMode flushMode, + ParameterMetadata parameterMetadata) { + // TODO : this constructor form is *only* used from constructor directly below us; can it go away? 
+ super( sql, flushMode, session, parameterMetadata ); + queryReturns = new ArrayList(returnAliases.length); + for ( int i=0; iScrollableResults interface + * @author Gavin King + */ +public class ScrollableResultsImpl extends AbstractScrollableResults implements ScrollableResults { + + private Object[] currentRow; + + public ScrollableResultsImpl( + ResultSet rs, + PreparedStatement ps, + SessionImplementor sess, + Loader loader, + QueryParameters queryParameters, + Type[] types, HolderInstantiator holderInstantiator) throws MappingException { + super( rs, ps, sess, loader, queryParameters, types, holderInstantiator ); + } + + protected Object[] getCurrentRow() { + return currentRow; + } + + /** + * @see org.hibernate.ScrollableResults#scroll(int) + */ + public boolean scroll(int i) throws HibernateException { + try { + boolean result = getResultSet().relative(i); + prepareCurrentRow(result); + return result; + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "could not advance using scroll()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#first() + */ + public boolean first() throws HibernateException { + try { + boolean result = getResultSet().first(); + prepareCurrentRow(result); + return result; + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "could not advance using first()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#last() + */ + public boolean last() throws HibernateException { + try { + boolean result = getResultSet().last(); + prepareCurrentRow(result); + return result; + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "could not advance using last()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#next() + */ + public boolean next() throws HibernateException { + try { + boolean result = getResultSet().next(); + prepareCurrentRow(result); + return result; + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "could not advance using next()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#previous() + */ + public boolean previous() throws HibernateException { + try { + boolean result = getResultSet().previous(); + prepareCurrentRow(result); + return result; + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "could not advance using previous()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#afterLast() + */ + public void afterLast() throws HibernateException { + try { + getResultSet().afterLast(); + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "exception calling afterLast()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#beforeFirst() + */ + public void beforeFirst() throws HibernateException { + try { + getResultSet().beforeFirst(); + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "exception calling beforeFirst()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#isFirst() + */ + public boolean isFirst() throws HibernateException { + try { + return getResultSet().isFirst(); + } + 
catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "exception calling isFirst()" + ); + } + } + + /** + * @see org.hibernate.ScrollableResults#isLast() + */ + public boolean isLast() throws HibernateException { + try { + return getResultSet().isLast(); + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "exception calling isLast()" + ); + } + } + + public int getRowNumber() throws HibernateException { + try { + return getResultSet().getRow()-1; + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "exception calling getRow()" + ); + } + } + + public boolean setRowNumber(int rowNumber) throws HibernateException { + if (rowNumber>=0) rowNumber++; + try { + boolean result = getResultSet().absolute(rowNumber); + prepareCurrentRow(result); + return result; + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + getSession().getFactory().getSQLExceptionConverter(), + sqle, + "could not advance using absolute()" + ); + } + } + + private void prepareCurrentRow(boolean underlyingScrollSuccessful) + throws HibernateException { + + if (!underlyingScrollSuccessful) { + currentRow = null; + return; + } + + Object result = getLoader().loadSingleRow( + getResultSet(), + getSession(), + getQueryParameters(), + false + ); + if ( result != null && result.getClass().isArray() ) { + currentRow = (Object[]) result; + } + else { + currentRow = new Object[] { result }; + } + + if ( getHolderInstantiator() != null ) { + currentRow = new Object[] { getHolderInstantiator().instantiate(currentRow) }; + } + + afterScrollOperation(); + } + +} diff --git a/src/org/hibernate/impl/SessionFactoryImpl.java b/src/org/hibernate/impl/SessionFactoryImpl.java new file mode 100644 index 0000000000..a9765758ef --- /dev/null +++ b/src/org/hibernate/impl/SessionFactoryImpl.java @@ -0,0 +1,1070 @@ +//$Id$ +package org.hibernate.impl; + +import java.io.IOException; +import java.io.InvalidObjectException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.ObjectStreamException; +import java.io.Serializable; +import java.sql.Connection; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Properties; +import java.util.Set; + +import javax.naming.NamingException; +import javax.naming.Reference; +import javax.naming.StringRefAddr; +import javax.transaction.TransactionManager; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.ConnectionReleaseMode; +import org.hibernate.EntityMode; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.MappingException; +import org.hibernate.ObjectNotFoundException; +import org.hibernate.QueryException; +import org.hibernate.SessionFactory; +import org.hibernate.StatelessSession; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.cache.Cache; +import org.hibernate.cache.CacheConcurrencyStrategy; +import org.hibernate.cache.CacheFactory; +import org.hibernate.cache.CacheKey; +import org.hibernate.cache.OptimisticCache; +import org.hibernate.cache.QueryCache; +import 
org.hibernate.cache.UpdateTimestampsCache; +import org.hibernate.cfg.Configuration; +import org.hibernate.cfg.Environment; +import org.hibernate.cfg.Settings; +import org.hibernate.connection.ConnectionProvider; +import org.hibernate.context.CurrentSessionContext; +import org.hibernate.context.JTASessionContext; +import org.hibernate.context.ManagedSessionContext; +import org.hibernate.context.ThreadLocalSessionContext; +import org.hibernate.dialect.Dialect; +import org.hibernate.dialect.function.SQLFunctionRegistry; +import org.hibernate.engine.FilterDefinition; +import org.hibernate.engine.Mapping; +import org.hibernate.engine.NamedQueryDefinition; +import org.hibernate.engine.NamedSQLQueryDefinition; +import org.hibernate.engine.ResultSetMappingDefinition; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.engine.query.QueryPlanCache; +import org.hibernate.event.EventListeners; +import org.hibernate.exception.SQLExceptionConverter; +import org.hibernate.id.IdentifierGenerator; +import org.hibernate.id.UUIDHexGenerator; +import org.hibernate.jdbc.BatcherFactory; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.mapping.RootClass; +import org.hibernate.metadata.ClassMetadata; +import org.hibernate.metadata.CollectionMetadata; +import org.hibernate.persister.PersisterFactory; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.proxy.EntityNotFoundDelegate; +import org.hibernate.stat.Statistics; +import org.hibernate.stat.StatisticsImpl; +import org.hibernate.stat.StatisticsImplementor; +import org.hibernate.tool.hbm2ddl.SchemaExport; +import org.hibernate.tool.hbm2ddl.SchemaUpdate; +import org.hibernate.tool.hbm2ddl.SchemaValidator; +import org.hibernate.transaction.TransactionFactory; +import org.hibernate.type.AssociationType; +import org.hibernate.type.Type; +import org.hibernate.util.CollectionHelper; +import org.hibernate.util.ReflectHelper; + + +/** + * Concrete implementation of the SessionFactory interface. Has the following + * responsibilites + *
+ * <ul>
+ * <li>caches configuration settings (immutably)
+ * <li>caches "compiled" mappings, i.e. EntityPersisters and
+ *     CollectionPersisters (immutable)
+ * <li>caches "compiled" queries (memory-sensitive cache)
+ * <li>manages PreparedStatements
+ * <li>delegates JDBC Connection management to the ConnectionProvider
+ * <li>factory for instances of SessionImpl
+ * </ul>
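+ *
+ * A typical client interaction, sketched (configuration sources elided):
+ * <pre>
+ * SessionFactory factory = new Configuration().configure().buildSessionFactory();
+ * Session session = factory.openSession();
+ * try {
+ *     // ... work with the session ...
+ * }
+ * finally {
+ *     session.close();
+ * }
+ * </pre>
+ *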
    + * This class must appear immutable to clients, even if it does all kinds of caching + * and pooling under the covers. It is crucial that the class is not only thread + * safe, but also highly concurrent. Synchronization must be used extremely sparingly. + * + * @see org.hibernate.connection.ConnectionProvider + * @see org.hibernate.classic.Session + * @see org.hibernate.hql.QueryTranslator + * @see org.hibernate.persister.entity.EntityPersister + * @see org.hibernate.persister.collection.CollectionPersister + * @author Gavin King + */ +public final class SessionFactoryImpl implements SessionFactory, SessionFactoryImplementor { + + private final String name; + private final String uuid; + + private final transient Map entityPersisters; + private final transient Map classMetadata; + private final transient Map collectionPersisters; + private final transient Map collectionMetadata; + private final transient Map collectionRolesByEntityParticipant; + private final transient Map identifierGenerators; + private final transient Map namedQueries; + private final transient Map namedSqlQueries; + private final transient Map sqlResultSetMappings; + private final transient Map filters; + private final transient Map imports; + private final transient Interceptor interceptor; + private final transient Settings settings; + private final transient Properties properties; + private transient SchemaExport schemaExport; + private final transient TransactionManager transactionManager; + private final transient QueryCache queryCache; + private final transient UpdateTimestampsCache updateTimestampsCache; + private final transient Map queryCaches; + private final transient Map allCacheRegions = new HashMap(); + private final transient StatisticsImpl statistics = new StatisticsImpl(this); + private final transient EventListeners eventListeners; + private final transient CurrentSessionContext currentSessionContext; + private final transient EntityNotFoundDelegate entityNotFoundDelegate; + private final transient SQLFunctionRegistry sqlFunctionRegistry; + + private final QueryPlanCache queryPlanCache = new QueryPlanCache( this ); + + private transient boolean isClosed = false; + + + private static final IdentifierGenerator UUID_GENERATOR = new UUIDHexGenerator(); + + private static final Log log = LogFactory.getLog(SessionFactoryImpl.class); + + public SessionFactoryImpl( + Configuration cfg, + Mapping mapping, + Settings settings, + EventListeners listeners) + throws HibernateException { + + log.info("building session factory"); + + this.properties = new Properties(); + this.properties.putAll( cfg.getProperties() ); + this.interceptor = cfg.getInterceptor(); + this.settings = settings; + this.sqlFunctionRegistry = new SQLFunctionRegistry(settings.getDialect(), cfg.getSqlFunctions()); + this.eventListeners = listeners; + this.filters = new HashMap(); + this.filters.putAll( cfg.getFilterDefinitions() ); + + if ( log.isDebugEnabled() ) { + log.debug("Session factory constructed with filter configurations : " + filters); + } + + if ( log.isDebugEnabled() ) { + log.debug( + "instantiating session factory with properties: " + properties + ); + } + + // Caches + settings.getCacheProvider().start( properties ); + + //Generators: + + identifierGenerators = new HashMap(); + Iterator classes = cfg.getClassMappings(); + while ( classes.hasNext() ) { + PersistentClass model = (PersistentClass) classes.next(); + if ( !model.isInherited() ) { + IdentifierGenerator generator = model.getIdentifier().createIdentifierGenerator( 
+ settings.getDialect(), + settings.getDefaultCatalogName(), + settings.getDefaultSchemaName(), + (RootClass) model + ); + identifierGenerators.put( model.getEntityName(), generator ); + } + } + + //Persisters: + + Map caches = new HashMap(); + entityPersisters = new HashMap(); + Map classMeta = new HashMap(); + classes = cfg.getClassMappings(); + while ( classes.hasNext() ) { + PersistentClass model = (PersistentClass) classes.next(); + model.prepareTemporaryTables( mapping, settings.getDialect() ); + String cacheRegion = model.getRootClass().getCacheRegionName(); + CacheConcurrencyStrategy cache = (CacheConcurrencyStrategy) caches.get(cacheRegion); + if (cache==null) { + cache = CacheFactory.createCache( + model.getCacheConcurrencyStrategy(), + cacheRegion, + model.isMutable(), + settings, + properties + ); + if (cache!=null) { + caches.put(cacheRegion, cache); + allCacheRegions.put( cache.getRegionName(), cache.getCache() ); + } + } + EntityPersister cp = PersisterFactory.createClassPersister(model, cache, this, mapping); + if ( cache != null && cache.getCache() instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache.getCache() ).setSource( cp ); + } + entityPersisters.put( model.getEntityName(), cp ); + classMeta.put( model.getEntityName(), cp.getClassMetadata() ); + } + classMetadata = Collections.unmodifiableMap(classMeta); + + Map tmpEntityToCollectionRoleMap = new HashMap(); + collectionPersisters = new HashMap(); + Iterator collections = cfg.getCollectionMappings(); + while ( collections.hasNext() ) { + Collection model = (Collection) collections.next(); + CacheConcurrencyStrategy cache = CacheFactory.createCache( + model.getCacheConcurrencyStrategy(), + model.getCacheRegionName(), + model.isMutable(), + settings, + properties + ); + if ( cache != null ) { + allCacheRegions.put( cache.getRegionName(), cache.getCache() ); + } + CollectionPersister persister = PersisterFactory.createCollectionPersister(cfg, model, cache, this); + collectionPersisters.put( model.getRole(), persister.getCollectionMetadata() ); + Type indexType = persister.getIndexType(); + if ( indexType != null && indexType.isAssociationType() && !indexType.isAnyType() ) { + String entityName = ( ( AssociationType ) indexType ).getAssociatedEntityName( this ); + Set roles = ( Set ) tmpEntityToCollectionRoleMap.get( entityName ); + if ( roles == null ) { + roles = new HashSet(); + tmpEntityToCollectionRoleMap.put( entityName, roles ); + } + roles.add( persister.getRole() ); + } + Type elementType = persister.getElementType(); + if ( elementType.isAssociationType() && !elementType.isAnyType() ) { + String entityName = ( ( AssociationType ) elementType ).getAssociatedEntityName( this ); + Set roles = ( Set ) tmpEntityToCollectionRoleMap.get( entityName ); + if ( roles == null ) { + roles = new HashSet(); + tmpEntityToCollectionRoleMap.put( entityName, roles ); + } + roles.add( persister.getRole() ); + } + } + collectionMetadata = Collections.unmodifiableMap(collectionPersisters); + Iterator itr = tmpEntityToCollectionRoleMap.entrySet().iterator(); + while ( itr.hasNext() ) { + final Map.Entry entry = ( Map.Entry ) itr.next(); + entry.setValue( Collections.unmodifiableSet( ( Set ) entry.getValue() ) ); + } + collectionRolesByEntityParticipant = Collections.unmodifiableMap( tmpEntityToCollectionRoleMap ); + + //Named Queries: + namedQueries = new HashMap( cfg.getNamedQueries() ); + namedSqlQueries = new HashMap( cfg.getNamedSQLQueries() ); + sqlResultSetMappings = new HashMap( cfg.getSqlResultSetMappings() ); + 
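+		// the imports map translates short and imported class names to
+		// fully-qualified entity names (consulted by getImportedClassName)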
imports = new HashMap( cfg.getImports() ); + + // after *all* persisters and named queries are registered + Iterator iter = entityPersisters.values().iterator(); + while ( iter.hasNext() ) { + ( (EntityPersister) iter.next() ).postInstantiate(); + } + iter = collectionPersisters.values().iterator(); + while ( iter.hasNext() ) { + ( (CollectionPersister) iter.next() ).postInstantiate(); + } + + //JNDI + Serialization: + + name = settings.getSessionFactoryName(); + try { + uuid = (String) UUID_GENERATOR.generate(null, null); + } + catch (Exception e) { + throw new AssertionFailure("Could not generate UUID"); + } + SessionFactoryObjectFactory.addInstance(uuid, name, this, properties); + + log.debug("instantiated session factory"); + + if ( settings.isAutoCreateSchema() ) { + new SchemaExport( cfg, settings ).create( false, true ); + } + if ( settings.isAutoUpdateSchema() ) { + new SchemaUpdate( cfg, settings ).execute( false, true ); + } + if ( settings.isAutoValidateSchema() ) { + new SchemaValidator( cfg, settings ).validate(); + } + if ( settings.isAutoDropSchema() ) { + schemaExport = new SchemaExport( cfg, settings ); + } + + if ( settings.getTransactionManagerLookup()!=null ) { + log.debug("obtaining JTA TransactionManager"); + transactionManager = settings.getTransactionManagerLookup().getTransactionManager(properties); + } + else { + if ( settings.getTransactionFactory().isTransactionManagerRequired() ) { + throw new HibernateException("The chosen transaction strategy requires access to the JTA TransactionManager"); + } + transactionManager = null; + } + + currentSessionContext = buildCurrentSessionContext(); + + if ( settings.isQueryCacheEnabled() ) { + updateTimestampsCache = new UpdateTimestampsCache(settings, properties); + queryCache = settings.getQueryCacheFactory() + .getQueryCache(null, updateTimestampsCache, settings, properties); + queryCaches = new HashMap(); + allCacheRegions.put( updateTimestampsCache.getRegionName(), updateTimestampsCache.getCache() ); + allCacheRegions.put( queryCache.getRegionName(), queryCache.getCache() ); + } + else { + updateTimestampsCache = null; + queryCache = null; + queryCaches = null; + } + + //checking for named queries + if ( settings.isNamedQueryStartupCheckingEnabled() ) { + Map errors = checkNamedQueries(); + if ( !errors.isEmpty() ) { + Set keys = errors.keySet(); + StringBuffer failingQueries = new StringBuffer( "Errors in named queries: " ); + for ( Iterator iterator = keys.iterator() ; iterator.hasNext() ; ) { + String queryName = ( String ) iterator.next(); + HibernateException e = ( HibernateException ) errors.get( queryName ); + failingQueries.append( queryName ); + if ( iterator.hasNext() ) { + failingQueries.append( ", " ); + } + log.error( "Error in named query: " + queryName, e ); + } + throw new HibernateException( failingQueries.toString() ); + } + } + + //stats + getStatistics().setStatisticsEnabled( settings.isStatisticsEnabled() ); + + // EntityNotFoundDelegate + EntityNotFoundDelegate entityNotFoundDelegate = cfg.getEntityNotFoundDelegate(); + if ( entityNotFoundDelegate == null ) { + entityNotFoundDelegate = new EntityNotFoundDelegate() { + public void handleEntityNotFound(String entityName, Serializable id) { + throw new ObjectNotFoundException( id, entityName ); + } + }; + } + this.entityNotFoundDelegate = entityNotFoundDelegate; + } + + public QueryPlanCache getQueryPlanCache() { + return queryPlanCache; + } + + private Map checkNamedQueries() throws HibernateException { + Map errors = new HashMap(); + + // Check 
named HQL queries + log.debug("Checking " + namedQueries.size() + " named HQL queries"); + Iterator itr = namedQueries.entrySet().iterator(); + while ( itr.hasNext() ) { + final Map.Entry entry = ( Map.Entry ) itr.next(); + final String queryName = ( String ) entry.getKey(); + final NamedQueryDefinition qd = ( NamedQueryDefinition ) entry.getValue(); + // this will throw an error if there's something wrong. + try { + log.debug("Checking named query: " + queryName); + //TODO: BUG! this currently fails for named queries for non-POJO entities + queryPlanCache.getHQLQueryPlan( qd.getQueryString(), false, CollectionHelper.EMPTY_MAP ); + } + catch ( QueryException e ) { + errors.put( queryName, e ); + } + catch ( MappingException e ) { + errors.put( queryName, e ); + } + } + + log.debug("Checking " + namedSqlQueries.size() + " named SQL queries"); + itr = namedSqlQueries.entrySet().iterator(); + while ( itr.hasNext() ) { + final Map.Entry entry = ( Map.Entry ) itr.next(); + final String queryName = ( String ) entry.getKey(); + final NamedSQLQueryDefinition qd = ( NamedSQLQueryDefinition ) entry.getValue(); + // this will throw an error if there's something wrong. + try { + log.debug("Checking named SQL query: " + queryName); + // TODO : would be really nice to cache the spec on the query-def so as to not have to re-calc the hash; + // currently not doable though because of the resultset-ref stuff... + NativeSQLQuerySpecification spec = null; + if ( qd.getResultSetRef() != null ) { + ResultSetMappingDefinition definition = ( ResultSetMappingDefinition ) + sqlResultSetMappings.get( qd.getResultSetRef() ); + if ( definition == null ) { + throw new MappingException( + "Unable to find resultset-ref definition: " + qd.getResultSetRef() + ); + } + spec = new NativeSQLQuerySpecification( + qd.getQueryString(), + definition.getQueryReturns(), + qd.getQuerySpaces() + ); + } + else + { + spec = new NativeSQLQuerySpecification( + qd.getQueryString(), + qd.getQueryReturns(), + qd.getQuerySpaces() + ); + } + queryPlanCache.getNativeSQLQueryPlan( spec ); + } + catch ( QueryException e ) { + errors.put( queryName, e ); + } + catch ( MappingException e ) { + errors.put( queryName, e ); + } + } + + return errors; + } + + public StatelessSession openStatelessSession() { + return new StatelessSessionImpl( null, this ); + } + + public StatelessSession openStatelessSession(Connection connection) { + return new StatelessSessionImpl( connection, this ); + } + + private SessionImpl openSession( + Connection connection, + boolean autoClose, + long timestamp, + Interceptor sessionLocalInterceptor + ) { + return new SessionImpl( + connection, + this, + autoClose, + timestamp, + sessionLocalInterceptor == null ? interceptor : sessionLocalInterceptor, + settings.getDefaultEntityMode(), + settings.isFlushBeforeCompletionEnabled(), + settings.isAutoCloseSessionEnabled(), + settings.getConnectionReleaseMode() + ); + } + + public org.hibernate.classic.Session openSession(Connection connection, Interceptor sessionLocalInterceptor) { + return openSession(connection, false, Long.MIN_VALUE, sessionLocalInterceptor); + } + + public org.hibernate.classic.Session openSession(Interceptor sessionLocalInterceptor) + throws HibernateException { + // note that this timestamp is not correct if the connection provider + // returns an older JDBC connection that was associated with a + // transaction that was already begun before openSession() was called + // (don't know any possible solution to this!) 
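+		// the timestamp bounds second-level cache visibility for the session:
+		// items cached after it are not readable, so an early value errs
+		// toward cache misses rather than stale reads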
+ long timestamp = settings.getCacheProvider().nextTimestamp(); + return openSession( null, true, timestamp, sessionLocalInterceptor ); + } + + public org.hibernate.classic.Session openSession(Connection connection) { + return openSession(connection, interceptor); //prevents this session from adding things to cache + } + + public org.hibernate.classic.Session openSession() throws HibernateException { + return openSession(interceptor); + } + + public org.hibernate.classic.Session openTemporarySession() throws HibernateException { + return new SessionImpl( + null, + this, + true, + settings.getCacheProvider().nextTimestamp(), + interceptor, + settings.getDefaultEntityMode(), + false, + false, + ConnectionReleaseMode.AFTER_STATEMENT + ); + } + + public org.hibernate.classic.Session openSession( + final Connection connection, + final boolean flushBeforeCompletionEnabled, + final boolean autoCloseSessionEnabled, + final ConnectionReleaseMode connectionReleaseMode) throws HibernateException { + return new SessionImpl( + connection, + this, + true, + settings.getCacheProvider().nextTimestamp(), + interceptor, + settings.getDefaultEntityMode(), + flushBeforeCompletionEnabled, + autoCloseSessionEnabled, + connectionReleaseMode + ); + } + + public org.hibernate.classic.Session getCurrentSession() throws HibernateException { + if ( currentSessionContext == null ) { + throw new HibernateException( "No CurrentSessionContext configured!" ); + } + return currentSessionContext.currentSession(); + } + + public EntityPersister getEntityPersister(String entityName) throws MappingException { + EntityPersister result = (EntityPersister) entityPersisters.get(entityName); + if (result==null) { + throw new MappingException( "Unknown entity: " + entityName ); + } + return result; + } + + public CollectionPersister getCollectionPersister(String role) throws MappingException { + CollectionPersister result = (CollectionPersister) collectionPersisters.get(role); + if (result==null) { + throw new MappingException( "Unknown collection role: " + role ); + } + return result; + } + + public Settings getSettings() { + return settings; + } + + public Dialect getDialect() { + return settings.getDialect(); + } + + public Interceptor getInterceptor() + { + return interceptor; + } + + public TransactionFactory getTransactionFactory() { + return settings.getTransactionFactory(); + } + + public TransactionManager getTransactionManager() { + return transactionManager; + } + + public SQLExceptionConverter getSQLExceptionConverter() { + return settings.getSQLExceptionConverter(); + } + + public Set getCollectionRolesByEntityParticipant(String entityName) { + return ( Set ) collectionRolesByEntityParticipant.get( entityName ); + } + + // from javax.naming.Referenceable + public Reference getReference() throws NamingException { + log.debug("Returning a Reference to the SessionFactory"); + return new Reference( + SessionFactoryImpl.class.getName(), + new StringRefAddr("uuid", uuid), + SessionFactoryObjectFactory.class.getName(), + null + ); + } + + private Object readResolve() throws ObjectStreamException { + log.trace("Resolving serialized SessionFactory"); + // look for the instance by uuid + Object result = SessionFactoryObjectFactory.getInstance(uuid); + if (result==null) { + // in case we were deserialized in a different JVM, look for an instance with the same name + // (alternatively we could do an actual JNDI lookup here....) 
+ result = SessionFactoryObjectFactory.getNamedInstance(name); + if (result==null) { + throw new InvalidObjectException("Could not find a SessionFactory named: " + name); + } + else { + log.debug("resolved SessionFactory by name"); + } + } + else { + log.debug("resolved SessionFactory by uid"); + } + return result; + } + + public NamedQueryDefinition getNamedQuery(String queryName) { + return (NamedQueryDefinition) namedQueries.get(queryName); + } + + public NamedSQLQueryDefinition getNamedSQLQuery(String queryName) { + return (NamedSQLQueryDefinition) namedSqlQueries.get(queryName); + } + + public ResultSetMappingDefinition getResultSetMapping(String resultSetName) { + return (ResultSetMappingDefinition) sqlResultSetMappings.get(resultSetName); + } + + public Type getIdentifierType(String className) throws MappingException { + return getEntityPersister(className).getIdentifierType(); + } + public String getIdentifierPropertyName(String className) throws MappingException { + return getEntityPersister(className).getIdentifierPropertyName(); + } + + private final void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + log.trace("deserializing"); + in.defaultReadObject(); + log.debug("deserialized: " + uuid); + } + private final void writeObject(ObjectOutputStream out) throws IOException { + log.debug("serializing: " + uuid); + out.defaultWriteObject(); + log.trace("serialized"); + } + + public Type[] getReturnTypes(String queryString) throws HibernateException { + return queryPlanCache.getHQLQueryPlan( queryString, false, CollectionHelper.EMPTY_MAP ).getReturnMetadata().getReturnTypes(); + } + + public String[] getReturnAliases(String queryString) throws HibernateException { + return queryPlanCache.getHQLQueryPlan( queryString, false, CollectionHelper.EMPTY_MAP ).getReturnMetadata().getReturnAliases(); + } + + public ClassMetadata getClassMetadata(Class persistentClass) throws HibernateException { + return getClassMetadata( persistentClass.getName() ); + } + + public CollectionMetadata getCollectionMetadata(String roleName) throws HibernateException { + return (CollectionMetadata) collectionMetadata.get(roleName); + } + + public ClassMetadata getClassMetadata(String entityName) throws HibernateException { + return (ClassMetadata) classMetadata.get(entityName); + } + + /** + * Return the names of all persistent (mapped) classes that extend or implement the + * given class or interface, accounting for implicit/explicit polymorphism settings + * and excluding mapped subclasses/joined-subclasses of other classes in the result. 
+ */ + public String[] getImplementors(String className) throws MappingException { + + final Class clazz; + try { + clazz = ReflectHelper.classForName(className); + } + catch (ClassNotFoundException cnfe) { + return new String[] { className }; //for a dynamic-class + } + + ArrayList results = new ArrayList(); + Iterator iter = entityPersisters.values().iterator(); + while ( iter.hasNext() ) { + //test this entity to see if we must query it + EntityPersister testPersister = (EntityPersister) iter.next(); + if ( testPersister instanceof Queryable ) { + Queryable testQueryable = (Queryable) testPersister; + String testClassName = testQueryable.getEntityName(); + boolean isMappedClass = className.equals(testClassName); + if ( testQueryable.isExplicitPolymorphism() ) { + if ( isMappedClass ) { + return new String[] {className}; //NOTE EARLY EXIT + } + } + else { + if (isMappedClass) { + results.add(testClassName); + } + else { + final Class mappedClass = testQueryable.getMappedClass( EntityMode.POJO ); + if ( mappedClass!=null && clazz.isAssignableFrom( mappedClass ) ) { + final boolean assignableSuperclass; + if ( testQueryable.isInherited() ) { + Class mappedSuperclass = getEntityPersister( testQueryable.getMappedSuperclass() ).getMappedClass( EntityMode.POJO); + assignableSuperclass = clazz.isAssignableFrom(mappedSuperclass); + } + else { + assignableSuperclass = false; + } + if ( !assignableSuperclass ) { + results.add( testClassName ); + } + } + } + } + } + } + return (String[]) results.toArray( new String[ results.size() ] ); + } + + public String getImportedClassName(String className) { + String result = (String) imports.get(className); + if (result==null) { + try { + ReflectHelper.classForName(className); + return className; + } + catch (ClassNotFoundException cnfe) { + return null; + } + } + else { + return result; + } + } + + public Map getAllClassMetadata() throws HibernateException { + return classMetadata; + } + + public Map getAllCollectionMetadata() throws HibernateException { + return collectionMetadata; + } + + /** + * Closes the session factory, releasing all held resources. + * + *
<ol>
+ * <li>cleans up the used cache regions and "stops" the cache provider,
+ * <li>closes the JDBC connection,
+ * <li>removes the JNDI binding.
+ * </ol>
    + * + * Note: Be aware that the sessionfactory instance still can + * be a "heavy" object memory wise after close() has been called. Thus + * it is important to not keep referencing the instance to let the garbage + * collector release the memory. + */ + public void close() throws HibernateException { + + log.info("closing"); + + isClosed = true; + + Iterator iter = entityPersisters.values().iterator(); + while ( iter.hasNext() ) { + EntityPersister p = (EntityPersister) iter.next(); + if ( p.hasCache() ) { + p.getCache().destroy(); + } + } + + iter = collectionPersisters.values().iterator(); + while ( iter.hasNext() ) { + CollectionPersister p = (CollectionPersister) iter.next(); + if ( p.hasCache() ) { + p.getCache().destroy(); + } + } + + if ( settings.isQueryCacheEnabled() ) { + queryCache.destroy(); + + iter = queryCaches.values().iterator(); + while ( iter.hasNext() ) { + QueryCache cache = (QueryCache) iter.next(); + cache.destroy(); + } + updateTimestampsCache.destroy(); + } + + settings.getCacheProvider().stop(); + + try { + settings.getConnectionProvider().close(); + } + finally { + SessionFactoryObjectFactory.removeInstance(uuid, name, properties); + } + + if ( settings.isAutoDropSchema() ) { + schemaExport.drop( false, true ); + } + + } + + public void evictEntity(String entityName, Serializable id) throws HibernateException { + EntityPersister p = getEntityPersister(entityName); + if ( p.hasCache() ) { + if ( log.isDebugEnabled() ) { + log.debug( "evicting second-level cache: " + MessageHelper.infoString(p, id, this) ); + } + CacheKey cacheKey = new CacheKey( id, p.getIdentifierType(), p.getRootEntityName(), EntityMode.POJO, this ); + p.getCache().remove( cacheKey ); + } + } + + public void evictEntity(String entityName) throws HibernateException { + EntityPersister p = getEntityPersister(entityName); + if ( p.hasCache() ) { + if ( log.isDebugEnabled() ) { + log.debug( "evicting second-level cache: " + p.getEntityName() ); + } + p.getCache().clear(); + } + } + + public void evict(Class persistentClass, Serializable id) throws HibernateException { + EntityPersister p = getEntityPersister( persistentClass.getName() ); + if ( p.hasCache() ) { + if ( log.isDebugEnabled() ) { + log.debug( "evicting second-level cache: " + MessageHelper.infoString(p, id, this) ); + } + CacheKey cacheKey = new CacheKey( id, p.getIdentifierType(), p.getRootEntityName(), EntityMode.POJO, this ); + p.getCache().remove( cacheKey ); + } + } + + public void evict(Class persistentClass) throws HibernateException { + EntityPersister p = getEntityPersister( persistentClass.getName() ); + if ( p.hasCache() ) { + if ( log.isDebugEnabled() ) { + log.debug( "evicting second-level cache: " + p.getEntityName() ); + } + p.getCache().clear(); + } + } + + public void evictCollection(String roleName, Serializable id) throws HibernateException { + CollectionPersister p = getCollectionPersister(roleName); + if ( p.hasCache() ) { + if ( log.isDebugEnabled() ) { + log.debug( "evicting second-level cache: " + MessageHelper.collectionInfoString(p, id, this) ); + } + CacheKey cacheKey = new CacheKey( id, p.getKeyType(), p.getRole(), EntityMode.POJO, this ); + p.getCache().remove( cacheKey ); + } + } + + public void evictCollection(String roleName) throws HibernateException { + CollectionPersister p = getCollectionPersister(roleName); + if ( p.hasCache() ) { + if ( log.isDebugEnabled() ) { + log.debug( "evicting second-level cache: " + p.getRole() ); + } + p.getCache().clear(); + } + } + + public Type 
getReferencedPropertyType(String className, String propertyName) + throws MappingException { + return getEntityPersister(className).getPropertyType(propertyName); + } + + public ConnectionProvider getConnectionProvider() { + return settings.getConnectionProvider(); + } + + public UpdateTimestampsCache getUpdateTimestampsCache() { + return updateTimestampsCache; + } + + public QueryCache getQueryCache() { + return queryCache; + } + + public QueryCache getQueryCache(String cacheRegion) throws HibernateException { + if (cacheRegion==null) { + return getQueryCache(); + } + + if ( !settings.isQueryCacheEnabled() ) { + return null; + } + + synchronized (allCacheRegions) { + QueryCache currentQueryCache = (QueryCache) queryCaches.get(cacheRegion); + if (currentQueryCache==null) { + currentQueryCache = settings.getQueryCacheFactory() + .getQueryCache(cacheRegion, updateTimestampsCache, settings, properties); + queryCaches.put(cacheRegion, currentQueryCache); + allCacheRegions.put( currentQueryCache.getRegionName(), currentQueryCache.getCache() ); + } + return currentQueryCache; + } + } + + public Cache getSecondLevelCacheRegion(String regionName) { + synchronized (allCacheRegions) { + return (Cache) allCacheRegions.get(regionName); + } + } + + public Map getAllSecondLevelCacheRegions() { + synchronized (allCacheRegions) { + return new HashMap(allCacheRegions); + } + } + + public boolean isClosed() { + return isClosed; + } + + public Statistics getStatistics() { + return statistics; + } + + public StatisticsImplementor getStatisticsImplementor() { + return statistics; + } + + public void evictQueries() throws HibernateException { + if ( settings.isQueryCacheEnabled() ) { + queryCache.clear(); + } + } + + public void evictQueries(String cacheRegion) throws HibernateException { + if (cacheRegion==null) { + throw new NullPointerException("use the zero-argument form to evict the default query cache"); + } + else { + synchronized (allCacheRegions) { + if ( settings.isQueryCacheEnabled() ) { + QueryCache currentQueryCache = (QueryCache) queryCaches.get(cacheRegion); + if ( currentQueryCache != null ) { + currentQueryCache.clear(); + } + } + } + } + } + + public FilterDefinition getFilterDefinition(String filterName) throws HibernateException { + FilterDefinition def = ( FilterDefinition ) filters.get( filterName ); + if ( def == null ) { + throw new HibernateException( "No such filter configured [" + filterName + "]" ); + } + return def; + } + + public Set getDefinedFilterNames() { + return filters.keySet(); + } + + public BatcherFactory getBatcherFactory() { + return settings.getBatcherFactory(); + } + + public IdentifierGenerator getIdentifierGenerator(String rootEntityName) { + return (IdentifierGenerator) identifierGenerators.get(rootEntityName); + } + + private CurrentSessionContext buildCurrentSessionContext() { + String impl = properties.getProperty( Environment.CURRENT_SESSION_CONTEXT_CLASS ); + // for backward-compatability + if ( impl == null && transactionManager != null ) { + impl = "jta"; + } + + if ( impl == null ) { + return null; + } + else if ( "jta".equals( impl ) ) { + if ( settings.getTransactionFactory().areCallbacksLocalToHibernateTransactions() ) { + log.warn( "JTASessionContext being used with JDBCTransactionFactory; auto-flush will not operate correctly with getCurrentSession()" ); + } + return new JTASessionContext( this ); + } + else if ( "thread".equals( impl ) ) { + return new ThreadLocalSessionContext( this ); + } + else if ( "managed".equals( impl ) ) { + return new 
ManagedSessionContext( this ); + } + else { + try { + Class implClass = ReflectHelper.classForName( impl ); + return ( CurrentSessionContext ) implClass + .getConstructor( new Class[] { SessionFactoryImplementor.class } ) + .newInstance( new Object[] { this } ); + } + catch( Throwable t ) { + log.error( "Unable to construct current session context [" + impl + "]", t ); + return null; + } + } + } + + public EventListeners getEventListeners() + { + return eventListeners; + } + + public EntityNotFoundDelegate getEntityNotFoundDelegate() { + return entityNotFoundDelegate; + } + + /** + * Custom serialization hook used during Session serialization. + * + * @param oos The stream to which to write the factory + * @throws IOException + */ + void serialize(ObjectOutputStream oos) throws IOException { + oos.writeUTF( uuid ); + oos.writeBoolean( name != null ); + if ( name != null ) { + oos.writeUTF( name ); + } + } + + /** + * Custom deserialization hook used during Session deserialization. + * + * @param ois The stream from which to "read" the factory + * @throws IOException + */ + static SessionFactoryImpl deserialize(ObjectInputStream ois) throws IOException, ClassNotFoundException { + String uuid = ois.readUTF(); + boolean isNamed = ois.readBoolean(); + String name = null; + if ( isNamed ) { + name = ois.readUTF(); + } + Object result = SessionFactoryObjectFactory.getInstance( uuid ); + if ( result == null ) { + log.trace( "could not locate session factory by uuid [" + uuid + "] during session deserialization; trying name" ); + if ( isNamed ) { + result = SessionFactoryObjectFactory.getNamedInstance( name ); + } + if ( result == null ) { + throw new InvalidObjectException( "could not resolve session factory during session deserialization [uuid=" + uuid + ", name=" + name + "]" ); + } + } + return ( SessionFactoryImpl ) result; + } + + public SQLFunctionRegistry getSqlFunctionRegistry() { + return sqlFunctionRegistry; + } +} diff --git a/src/org/hibernate/impl/SessionFactoryObjectFactory.java b/src/org/hibernate/impl/SessionFactoryObjectFactory.java new file mode 100644 index 0000000000..36a1b331de --- /dev/null +++ b/src/org/hibernate/impl/SessionFactoryObjectFactory.java @@ -0,0 +1,161 @@ +//$Id$ +package org.hibernate.impl; + +import java.util.Hashtable; +import java.util.Iterator; +import java.util.Properties; + +import javax.naming.Context; +import javax.naming.InvalidNameException; +import javax.naming.Name; +import javax.naming.NamingException; +import javax.naming.Reference; +import javax.naming.event.EventContext; +import javax.naming.event.NamespaceChangeListener; +import javax.naming.event.NamingEvent; +import javax.naming.event.NamingExceptionEvent; +import javax.naming.event.NamingListener; +import javax.naming.spi.ObjectFactory; + +import org.hibernate.SessionFactory; +import org.hibernate.util.FastHashMap; +import org.hibernate.util.NamingHelper; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Resolves SessionFactory JNDI lookups and deserialization + */ +public class SessionFactoryObjectFactory implements ObjectFactory { + + private static final SessionFactoryObjectFactory INSTANCE; //to stop the class from being unloaded + + private static final Log log; + + static { + log = LogFactory.getLog(SessionFactoryObjectFactory.class); + INSTANCE = new SessionFactoryObjectFactory(); + log.debug("initializing class SessionFactoryObjectFactory"); + } + + private static final FastHashMap INSTANCES = new FastHashMap(); + private static 
final FastHashMap NAMED_INSTANCES = new FastHashMap(); + + private static final NamingListener LISTENER = new NamespaceChangeListener() { + public void objectAdded(NamingEvent evt) { + log.debug( "A factory was successfully bound to name: " + evt.getNewBinding().getName() ); + } + public void objectRemoved(NamingEvent evt) { + String name = evt.getOldBinding().getName(); + log.info("A factory was unbound from name: " + name); + Object instance = NAMED_INSTANCES.remove(name); + Iterator iter = INSTANCES.values().iterator(); + while ( iter.hasNext() ) { + if ( iter.next()==instance ) iter.remove(); + } + } + public void objectRenamed(NamingEvent evt) { + String name = evt.getOldBinding().getName(); + log.info("A factory was renamed from name: " + name); + NAMED_INSTANCES.put( evt.getNewBinding().getName(), NAMED_INSTANCES.remove(name) ); + } + public void namingExceptionThrown(NamingExceptionEvent evt) { + log.warn( "Naming exception occurred accessing factory: " + evt.getException() ); + } + }; + + public Object getObjectInstance(Object reference, Name name, Context ctx, Hashtable env) throws Exception { + log.debug("JNDI lookup: " + name); + String uid = (String) ( (Reference) reference ).get(0).getContent(); + return getInstance(uid); + } + + public static void addInstance(String uid, String name, SessionFactory instance, Properties properties) { + + log.debug("registered: " + uid + " (" + ( (name==null) ? "unnamed" : name ) + ')'); + INSTANCES.put(uid, instance); + if (name!=null) NAMED_INSTANCES.put(name, instance); + + //must add to JNDI _after_ adding to HashMaps, because some JNDI servers use serialization + if (name==null) { + log.info("Not binding factory to JNDI, no JNDI name configured"); + } + else { + + log.info("Factory name: " + name); + + try { + Context ctx = NamingHelper.getInitialContext(properties); + NamingHelper.bind(ctx, name, instance); + log.info("Bound factory to JNDI name: " + name); + ( (EventContext) ctx ).addNamingListener(name, EventContext.OBJECT_SCOPE, LISTENER); + } + catch (InvalidNameException ine) { + log.error("Invalid JNDI name: " + name, ine); + } + catch (NamingException ne) { + log.warn("Could not bind factory to JNDI", ne); + } + catch(ClassCastException cce) { + log.warn("InitialContext did not implement EventContext"); + } + + } + + } + + public static void removeInstance(String uid, String name, Properties properties) { + //TODO: theoretically non-threadsafe... 
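+ // (the JNDI unbind and the two map removals below are not performed atomically,
+ // so a concurrent addInstance()/getInstance() could briefly observe a factory
+ // present in one registry but not the other)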
+ + if (name!=null) { + log.info("Unbinding factory from JNDI name: " + name); + + try { + Context ctx = NamingHelper.getInitialContext(properties); + ctx.unbind(name); + log.info("Unbound factory from JNDI name: " + name); + } + catch (InvalidNameException ine) { + log.error("Invalid JNDI name: " + name, ine); + } + catch (NamingException ne) { + log.warn("Could not unbind factory from JNDI", ne); + } + + NAMED_INSTANCES.remove(name); + + } + + INSTANCES.remove(uid); + + } + + public static Object getNamedInstance(String name) { + log.debug("lookup: name=" + name); + Object result = NAMED_INSTANCES.get(name); + if (result==null) { + log.debug("Not found: " + name); + log.debug(NAMED_INSTANCES); + } + return result; + } + + public static Object getInstance(String uid) { + log.debug("lookup: uid=" + uid); + Object result = INSTANCES.get(uid); + if (result==null) { + log.debug("Not found: " + uid); + log.debug(INSTANCES); + } + return result; + } + +} + + + + + + + diff --git a/src/org/hibernate/impl/SessionImpl.java b/src/org/hibernate/impl/SessionImpl.java new file mode 100644 index 0000000000..04bc801285 --- /dev/null +++ b/src/org/hibernate/impl/SessionImpl.java @@ -0,0 +1,1958 @@ +//$Id$ +package org.hibernate.impl; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.sql.Connection; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.dom4j.Element; +import org.hibernate.CacheMode; +import org.hibernate.ConnectionReleaseMode; +import org.hibernate.Criteria; +import org.hibernate.EntityMode; +import org.hibernate.Filter; +import org.hibernate.FlushMode; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.ObjectDeletedException; +import org.hibernate.Query; +import org.hibernate.QueryException; +import org.hibernate.ReplicationMode; +import org.hibernate.SQLQuery; +import org.hibernate.ScrollMode; +import org.hibernate.ScrollableResults; +import org.hibernate.Session; +import org.hibernate.SessionException; +import org.hibernate.SessionFactory; +import org.hibernate.Transaction; +import org.hibernate.TransientObjectException; +import org.hibernate.UnresolvableObjectException; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.ActionQueue; +import org.hibernate.engine.CollectionEntry; +import org.hibernate.engine.EntityEntry; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.FilterDefinition; +import org.hibernate.engine.PersistenceContext; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.StatefulPersistenceContext; +import org.hibernate.engine.Status; +import org.hibernate.engine.query.FilterQueryPlan; +import org.hibernate.engine.query.HQLQueryPlan; +import org.hibernate.engine.query.NativeSQLQueryPlan; +import org.hibernate.event.AutoFlushEvent; +import org.hibernate.event.AutoFlushEventListener; +import org.hibernate.event.DeleteEvent; +import org.hibernate.event.DeleteEventListener; +import org.hibernate.event.DirtyCheckEvent; +import 
org.hibernate.event.DirtyCheckEventListener; +import org.hibernate.event.EventListeners; +import org.hibernate.event.EventSource; +import org.hibernate.event.EvictEvent; +import org.hibernate.event.EvictEventListener; +import org.hibernate.event.FlushEvent; +import org.hibernate.event.FlushEventListener; +import org.hibernate.event.InitializeCollectionEvent; +import org.hibernate.event.InitializeCollectionEventListener; +import org.hibernate.event.LoadEvent; +import org.hibernate.event.LoadEventListener; +import org.hibernate.event.LockEvent; +import org.hibernate.event.LockEventListener; +import org.hibernate.event.MergeEvent; +import org.hibernate.event.MergeEventListener; +import org.hibernate.event.PersistEvent; +import org.hibernate.event.PersistEventListener; +import org.hibernate.event.RefreshEvent; +import org.hibernate.event.RefreshEventListener; +import org.hibernate.event.ReplicateEvent; +import org.hibernate.event.ReplicateEventListener; +import org.hibernate.event.SaveOrUpdateEvent; +import org.hibernate.event.SaveOrUpdateEventListener; +import org.hibernate.event.LoadEventListener.LoadType; +import org.hibernate.jdbc.Batcher; +import org.hibernate.jdbc.JDBCContext; +import org.hibernate.loader.criteria.CriteriaLoader; +import org.hibernate.loader.custom.CustomLoader; +import org.hibernate.loader.custom.CustomQuery; +import org.hibernate.persister.collection.CollectionPersister; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.OuterJoinLoadable; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; +import org.hibernate.stat.SessionStatistics; +import org.hibernate.stat.SessionStatisticsImpl; +import org.hibernate.tuple.DynamicMapInstantiator; +import org.hibernate.type.Type; +import org.hibernate.util.ArrayHelper; +import org.hibernate.util.CollectionHelper; +import org.hibernate.util.StringHelper; + + +/** + * Concrete implementation of a Session, and also the central, organizing component + * of Hibernate's internal implementation. As such, this class exposes two interfaces; + * Session itself, to the application, and SessionImplementor, to other components + * of Hibernate. This class is not threadsafe. + * + * @author Gavin King + */ +public final class SessionImpl extends AbstractSessionImpl + implements EventSource, org.hibernate.classic.Session, JDBCContext.Context { + + // todo : need to find a clean way to handle the "event source" role + // a seperate classs responsible for generating/dispatching events just duplicates most of the Session methods... + // passing around seperate reto interceptor, factory, actionQueue, and persistentContext is not manageable... 
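+ // A typical lifecycle, as seen from application code (illustrative sketch only;
+ // applications program against org.hibernate.Session, never this class):
+ //
+ //   Session s = sessionFactory.openSession();
+ //   Transaction tx = s.beginTransaction();
+ //   s.saveOrUpdate( someEntity );  // dispatched through fireSaveOrUpdate() below
+ //   tx.commit();                   // may trigger managedFlush()/afterTransactionCompletion()
+ //   s.close();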
+ + private static final Log log = LogFactory.getLog(SessionImpl.class); + + private transient EntityMode entityMode = EntityMode.POJO; + private transient boolean autoClear; //for EJB3 + + private transient long timestamp; + private transient FlushMode flushMode = FlushMode.AUTO; + private transient CacheMode cacheMode = CacheMode.NORMAL; + + private transient Interceptor interceptor; + + private transient int dontFlushFromFind = 0; + + private transient ActionQueue actionQueue; + private transient StatefulPersistenceContext persistenceContext; + private transient JDBCContext jdbcContext; + private transient EventListeners listeners; + + private transient boolean flushBeforeCompletionEnabled; + private transient boolean autoCloseSessionEnabled; + private transient ConnectionReleaseMode connectionReleaseMode; + + private transient String fetchProfile; + + private transient Map enabledFilters = new HashMap(); + + private transient Session rootSession; + private transient Map childSessionsByEntityMode; + + /** + * Constructor used in building "child sessions". + * + * @param parent The parent session + * @param entityMode + */ + private SessionImpl(SessionImpl parent, EntityMode entityMode) { + super( parent.factory ); + this.rootSession = parent; + this.timestamp = parent.timestamp; + this.jdbcContext = parent.jdbcContext; + this.interceptor = parent.interceptor; + this.listeners = parent.listeners; + this.actionQueue = new ActionQueue( this ); + this.entityMode = entityMode; + this.persistenceContext = new StatefulPersistenceContext( this ); + this.flushBeforeCompletionEnabled = false; + this.autoCloseSessionEnabled = false; + this.connectionReleaseMode = null; + + if ( factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().openSession(); + } + + log.debug( "opened session [" + entityMode + "]" ); + } + + /** + * Constructor used for openSession(...) processing, as well as construction + * of sessions for getCurrentSession(). + * + * @param connection The user-supplied connection to use for this session. + * @param factory The factory from which this session was obtained + * @param autoclose NOT USED + * @param timestamp The timestamp for this session + * @param interceptor The interceptor to be applied to this session + * @param entityMode The entity-mode for this session + * @param flushBeforeCompletionEnabled Should we auto flush before completion of transaction + * @param autoCloseSessionEnabled Should we auto close after completion of transaction + * @param connectionReleaseMode The mode by which we should release JDBC connections. 
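+ * <p>
+ * For example, the openSession(...) overloads in SessionFactoryImpl drive this
+ * constructor with <tt>settings.getCacheProvider().nextTimestamp()</tt> as the
+ * timestamp and <tt>settings.getDefaultEntityMode()</tt> as the entity-mode.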
+ */ + SessionImpl( + final Connection connection, + final SessionFactoryImpl factory, + final boolean autoclose, + final long timestamp, + final Interceptor interceptor, + final EntityMode entityMode, + final boolean flushBeforeCompletionEnabled, + final boolean autoCloseSessionEnabled, + final ConnectionReleaseMode connectionReleaseMode) { + super( factory ); + this.rootSession = null; + this.timestamp = timestamp; + this.entityMode = entityMode; + this.interceptor = interceptor; + this.listeners = factory.getEventListeners(); + this.actionQueue = new ActionQueue( this ); + this.persistenceContext = new StatefulPersistenceContext( this ); + this.flushBeforeCompletionEnabled = flushBeforeCompletionEnabled; + this.autoCloseSessionEnabled = autoCloseSessionEnabled; + this.connectionReleaseMode = connectionReleaseMode; + this.jdbcContext = new JDBCContext( this, connection, interceptor ); + + if ( factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().openSession(); + } + + if ( log.isDebugEnabled() ) { + log.debug( "opened session at timestamp: " + timestamp ); + } + } + + public Session getSession(EntityMode entityMode) { + if ( this.entityMode == entityMode ) { + return this; + } + + if ( rootSession != null ) { + rootSession.getSession( entityMode ); + } + + errorIfClosed(); + checkTransactionSynchStatus(); + + SessionImpl rtn = null; + if ( childSessionsByEntityMode == null ) { + childSessionsByEntityMode = new HashMap(); + } + else { + rtn = (SessionImpl) childSessionsByEntityMode.get( entityMode ); + } + + if ( rtn == null ) { + rtn = new SessionImpl( this, entityMode ); + childSessionsByEntityMode.put( entityMode, rtn ); + } + + return rtn; + } + + public void clear() { + errorIfClosed(); + checkTransactionSynchStatus(); + persistenceContext.clear(); + actionQueue.clear(); + } + + public Batcher getBatcher() { + errorIfClosed(); + checkTransactionSynchStatus(); + // TODO : should remove this exposure + // and have all references to the session's batcher use the ConnectionManager. 
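+ // (for now the batcher returned below is simply the one owned by the
+ // ConnectionManager; this method only widens its visibility)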
+ return jdbcContext.getConnectionManager().getBatcher(); + } + + public long getTimestamp() { + checkTransactionSynchStatus(); + return timestamp; + } + + public Connection close() throws HibernateException { + log.trace( "closing session" ); + if ( isClosed() ) { + throw new SessionException( "Session was already closed" ); + } + + + if ( factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().closeSession(); + } + + try { + try { + if ( childSessionsByEntityMode != null ) { + Iterator childSessions = childSessionsByEntityMode.values().iterator(); + while ( childSessions.hasNext() ) { + final SessionImpl child = ( SessionImpl ) childSessions.next(); + child.close(); + } + } + } + catch( Throwable t ) { + // just ignore + } + + if ( rootSession == null ) { + return jdbcContext.getConnectionManager().close(); + } + else { + return null; + } + } + finally { + setClosed(); + cleanup(); + } + } + + public ConnectionReleaseMode getConnectionReleaseMode() { + checkTransactionSynchStatus(); + return connectionReleaseMode; + } + + public boolean isAutoCloseSessionEnabled() { + return autoCloseSessionEnabled; + } + + public boolean isOpen() { + checkTransactionSynchStatus(); + return !isClosed(); + } + + public boolean isFlushModeNever() { + return FlushMode.isManualFlushMode( getFlushMode() ); + } + + public boolean isFlushBeforeCompletionEnabled() { + return flushBeforeCompletionEnabled; + } + + public void managedFlush() { + if ( isClosed() ) { + log.trace( "skipping auto-flush due to session closed" ); + return; + } + log.trace("automatically flushing session"); + flush(); + + if ( childSessionsByEntityMode != null ) { + Iterator iter = childSessionsByEntityMode.values().iterator(); + while ( iter.hasNext() ) { + ( (Session) iter.next() ).flush(); + } + } + } + + public boolean shouldAutoClose() { + return isAutoCloseSessionEnabled() && !isClosed(); + } + + public void managedClose() { + log.trace( "automatically closing session" ); + close(); + } + + public Connection connection() throws HibernateException { + errorIfClosed(); + return jdbcContext.borrowConnection(); + } + + public boolean isConnected() { + checkTransactionSynchStatus(); + return !isClosed() && jdbcContext.getConnectionManager().isCurrentlyConnected(); + } + + public boolean isTransactionInProgress() { + checkTransactionSynchStatus(); + return !isClosed() && jdbcContext.isTransactionInProgress(); + } + + public Connection disconnect() throws HibernateException { + errorIfClosed(); + log.debug( "disconnecting session" ); + return jdbcContext.getConnectionManager().manualDisconnect(); + } + + public void reconnect() throws HibernateException { + errorIfClosed(); + log.debug( "reconnecting session" ); + checkTransactionSynchStatus(); + jdbcContext.getConnectionManager().manualReconnect(); + } + + public void reconnect(Connection conn) throws HibernateException { + errorIfClosed(); + log.debug( "reconnecting session" ); + checkTransactionSynchStatus(); + jdbcContext.getConnectionManager().manualReconnect( conn ); + } + + public void beforeTransactionCompletion(Transaction tx) { + log.trace( "before transaction completion" ); + if ( rootSession == null ) { + try { + interceptor.beforeTransactionCompletion(tx); + } + catch (Throwable t) { + log.error("exception in interceptor beforeTransactionCompletion()", t); + } + } + } + + public void setAutoClear(boolean enabled) { + errorIfClosed(); + autoClear = enabled; + } + + /** + * Check if there is a Hibernate or JTA transaction in progress and, + * if 
there is not, flush if necessary, make sure the connection has + * been committed (if it is not in autocommit mode) and run the after + * completion processing + */ + public void afterOperation(boolean success) { + if ( !jdbcContext.isTransactionInProgress() ) { + jdbcContext.afterNontransactionalQuery( success ); + } + } + + public void afterTransactionCompletion(boolean success, Transaction tx) { + log.trace( "after transaction completion" ); + persistenceContext.afterTransactionCompletion(); + actionQueue.afterTransactionCompletion(success); + if ( rootSession == null && tx != null ) { + try { + interceptor.afterTransactionCompletion(tx); + } + catch (Throwable t) { + log.error("exception in interceptor afterTransactionCompletion()", t); + } + } + if ( autoClear ) { + clear(); + } + } + + /** + * clear all the internal collections, just + * to help the garbage collector, does not + * clear anything that is needed during the + * afterTransactionCompletion() phase + */ + private void cleanup() { + persistenceContext.clear(); + } + + public LockMode getCurrentLockMode(Object object) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + if ( object == null ) { + throw new NullPointerException( "null object passed to getCurrentLockMode()" ); + } + if ( object instanceof HibernateProxy ) { + object = ( (HibernateProxy) object ).getHibernateLazyInitializer().getImplementation(this); + if ( object == null ) { + return LockMode.NONE; + } + } + EntityEntry e = persistenceContext.getEntry(object); + if ( e == null ) { + throw new TransientObjectException( "Given object not associated with the session" ); + } + if ( e.getStatus() != Status.MANAGED ) { + throw new ObjectDeletedException( + "The given object was deleted", + e.getId(), + e.getPersister().getEntityName() + ); + } + return e.getLockMode(); + } + + public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException { + errorIfClosed(); + // todo : should this get moved to PersistentContext? + // logically, is PersistentContext the "thing" to which an interceptor gets attached? 
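+ // the flow: consult the persistence context first; on a miss, give the
+ // interceptor a chance to supply an instance, and if it does, associate that
+ // instance with the session via lock(..., LockMode.NONE)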
+ final Object result = persistenceContext.getEntity(key); + if ( result == null ) { + final Object newObject = interceptor.getEntity( key.getEntityName(), key.getIdentifier() ); + if ( newObject != null ) { + lock( newObject, LockMode.NONE ); + } + return newObject; + } + else { + return result; + } + } + + + // saveOrUpdate() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void saveOrUpdate(Object object) throws HibernateException { + saveOrUpdate(null, object); + } + + public void saveOrUpdate(String entityName, Object obj) throws HibernateException { + fireSaveOrUpdate( new SaveOrUpdateEvent(entityName, obj, this) ); + } + + private void fireSaveOrUpdate(SaveOrUpdateEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + SaveOrUpdateEventListener[] saveOrUpdateEventListener = listeners.getSaveOrUpdateEventListeners(); + for ( int i = 0; i < saveOrUpdateEventListener.length; i++ ) { + saveOrUpdateEventListener[i].onSaveOrUpdate(event); + } + } + + + // save() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void save(Object obj, Serializable id) throws HibernateException { + save(null, obj, id); + } + + public Serializable save(Object obj) throws HibernateException { + return save(null, obj); + } + + public Serializable save(String entityName, Object object) throws HibernateException { + return fireSave( new SaveOrUpdateEvent(entityName, object, this) ); + } + + public void save(String entityName, Object object, Serializable id) throws HibernateException { + fireSave( new SaveOrUpdateEvent(entityName, object, id, this) ); + } + + private Serializable fireSave(SaveOrUpdateEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + SaveOrUpdateEventListener[] saveEventListener = listeners.getSaveEventListeners(); + for ( int i = 0; i < saveEventListener.length; i++ ) { + saveEventListener[i].onSaveOrUpdate(event); + } + return event.getResultId(); + } + + + // update() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void update(Object obj) throws HibernateException { + update(null, obj); + } + + public void update(Object obj, Serializable id) throws HibernateException { + update(null, obj, id); + } + + public void update(String entityName, Object object) throws HibernateException { + fireUpdate( new SaveOrUpdateEvent(entityName, object, this) ); + } + + public void update(String entityName, Object object, Serializable id) throws HibernateException { + fireUpdate(new SaveOrUpdateEvent(entityName, object, id, this)); + } + + private void fireUpdate(SaveOrUpdateEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + SaveOrUpdateEventListener[] updateEventListener = listeners.getUpdateEventListeners(); + for ( int i = 0; i < updateEventListener.length; i++ ) { + updateEventListener[i].onSaveOrUpdate(event); + } + } + + + // lock() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException { + fireLock( new LockEvent(entityName, object, lockMode, this) ); + } + + public void lock(Object object, LockMode lockMode) throws HibernateException { + fireLock( new LockEvent(object, lockMode, this) ); + } + + private void fireLock(LockEvent lockEvent) { + errorIfClosed(); + checkTransactionSynchStatus(); + LockEventListener[] lockEventListener = listeners.getLockEventListeners(); + for ( int i = 0; i < lockEventListener.length; i++ ) { + lockEventListener[i].onLock( lockEvent ); + } + } + 
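+ // The dispatch idiom above repeats for every session operation: wrap the
+ // arguments in an event object, then hand it to each configured listener in
+ // turn. A custom listener (illustrative sketch; AuditSaveOrUpdateEventListener
+ // is hypothetical) just implements the listener interface, typically by
+ // extending the matching default listener:
+ //
+ //   public class AuditSaveOrUpdateEventListener
+ //           extends org.hibernate.event.def.DefaultSaveOrUpdateEventListener {
+ //       public void onSaveOrUpdate(SaveOrUpdateEvent event) {
+ //           // custom pre-processing here...
+ //           super.onSaveOrUpdate( event );
+ //       }
+ //   }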
+ + // persist() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void persist(String entityName, Object object) throws HibernateException { + firePersist( new PersistEvent(entityName, object, this) ); + } + + public void persist(Object object) throws HibernateException { + persist(null, object); + } + + public void persist(String entityName, Object object, Map copiedAlready) + throws HibernateException { + firePersist( copiedAlready, new PersistEvent(entityName, object, this) ); + } + + private void firePersist(Map copiedAlready, PersistEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + PersistEventListener[] persistEventListener = listeners.getPersistEventListeners(); + for ( int i = 0; i < persistEventListener.length; i++ ) { + persistEventListener[i].onPersist(event, copiedAlready); + } + } + + private void firePersist(PersistEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + PersistEventListener[] createEventListener = listeners.getPersistEventListeners(); + for ( int i = 0; i < createEventListener.length; i++ ) { + createEventListener[i].onPersist(event); + } + } + + + // persistOnFlush() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void persistOnFlush(String entityName, Object object) + throws HibernateException { + firePersistOnFlush( new PersistEvent(entityName, object, this) ); + } + + public void persistOnFlush(Object object) throws HibernateException { + persist(null, object); + } + + public void persistOnFlush(String entityName, Object object, Map copiedAlready) + throws HibernateException { + firePersistOnFlush( copiedAlready, new PersistEvent(entityName, object, this) ); + } + + private void firePersistOnFlush(Map copiedAlready, PersistEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + PersistEventListener[] persistEventListener = listeners.getPersistOnFlushEventListeners(); + for ( int i = 0; i < persistEventListener.length; i++ ) { + persistEventListener[i].onPersist(event, copiedAlready); + } + } + + private void firePersistOnFlush(PersistEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + PersistEventListener[] createEventListener = listeners.getPersistOnFlushEventListeners(); + for ( int i = 0; i < createEventListener.length; i++ ) { + createEventListener[i].onPersist(event); + } + } + + + // merge() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Object merge(String entityName, Object object) throws HibernateException { + return fireMerge( new MergeEvent(entityName, object, this) ); + } + + public Object merge(Object object) throws HibernateException { + return merge(null, object); + } + + public void merge(String entityName, Object object, Map copiedAlready) throws HibernateException { + fireMerge( copiedAlready, new MergeEvent(entityName, object, this) ); + } + + private Object fireMerge(MergeEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + MergeEventListener[] mergeEventListener = listeners.getMergeEventListeners(); + for ( int i = 0; i < mergeEventListener.length; i++ ) { + mergeEventListener[i].onMerge(event); + } + return event.getResult(); + } + + private void fireMerge(Map copiedAlready, MergeEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + MergeEventListener[] mergeEventListener = listeners.getMergeEventListeners(); + for ( int i = 0; i < mergeEventListener.length; i++ ) { + mergeEventListener[i].onMerge(event, copiedAlready); + } + } + + + // saveOrUpdateCopy() operations 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Object saveOrUpdateCopy(String entityName, Object object) + throws HibernateException { + return fireSaveOrUpdateCopy( new MergeEvent(entityName, object, this) ); + } + + public Object saveOrUpdateCopy(Object object) throws HibernateException { + return saveOrUpdateCopy( null, object ); + } + + public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) + throws HibernateException { + return fireSaveOrUpdateCopy( new MergeEvent(entityName, object, id, this) ); + } + + public Object saveOrUpdateCopy(Object object, Serializable id) + throws HibernateException { + return saveOrUpdateCopy( null, object, id ); + } + + public void saveOrUpdateCopy(String entityName, Object object, Map copiedAlready) + throws HibernateException { + fireSaveOrUpdateCopy( copiedAlready, new MergeEvent( entityName, object, this ) ); + } + + private void fireSaveOrUpdateCopy(Map copiedAlready, MergeEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + MergeEventListener[] saveOrUpdateCopyEventListener = listeners.getSaveOrUpdateCopyEventListeners(); + for ( int i = 0; i < saveOrUpdateCopyEventListener.length; i++ ) { + saveOrUpdateCopyEventListener[i].onMerge(event, copiedAlready); + } + } + + private Object fireSaveOrUpdateCopy(MergeEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + MergeEventListener[] saveOrUpdateCopyEventListener = listeners.getSaveOrUpdateCopyEventListeners(); + for ( int i = 0; i < saveOrUpdateCopyEventListener.length; i++ ) { + saveOrUpdateCopyEventListener[i].onMerge(event); + } + return event.getResult(); + } + + + // delete() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Delete a persistent object + */ + public void delete(Object object) throws HibernateException { + fireDelete( new DeleteEvent(object, this) ); + } + + /** + * Delete a persistent object (by explicit entity name) + */ + public void delete(String entityName, Object object) throws HibernateException { + fireDelete( new DeleteEvent( entityName, object, this ) ); + } + + /** + * Delete a persistent object + */ + public void delete(String entityName, Object object, boolean isCascadeDeleteEnabled, Set transientEntities) throws HibernateException { + fireDelete( new DeleteEvent( entityName, object, isCascadeDeleteEnabled, this ), transientEntities ); + } + + private void fireDelete(DeleteEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + DeleteEventListener[] deleteEventListener = listeners.getDeleteEventListeners(); + for ( int i = 0; i < deleteEventListener.length; i++ ) { + deleteEventListener[i].onDelete( event ); + } + } + + private void fireDelete(DeleteEvent event, Set transientEntities) { + errorIfClosed(); + checkTransactionSynchStatus(); + DeleteEventListener[] deleteEventListener = listeners.getDeleteEventListeners(); + for ( int i = 0; i < deleteEventListener.length; i++ ) { + deleteEventListener[i].onDelete( event, transientEntities ); + } + } + + + // load()/get() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void load(Object object, Serializable id) throws HibernateException { + LoadEvent event = new LoadEvent(id, object, this); + fireLoad( event, LoadEventListener.RELOAD ); + } + + public Object load(Class entityClass, Serializable id) throws HibernateException { + return load( entityClass.getName(), id ); + } + + public Object load(String entityName, Serializable id) throws HibernateException { + LoadEvent event = new LoadEvent(id, 
entityName, false, this); + boolean success = false; + try { + fireLoad( event, LoadEventListener.LOAD ); + if ( event.getResult() == null ) { + getFactory().getEntityNotFoundDelegate().handleEntityNotFound( entityName, id ); + } + success = true; + return event.getResult(); + } + finally { + afterOperation(success); + } + } + + public Object get(Class entityClass, Serializable id) throws HibernateException { + return get( entityClass.getName(), id ); + } + + public Object get(String entityName, Serializable id) throws HibernateException { + LoadEvent event = new LoadEvent(id, entityName, false, this); + boolean success = false; + try { + fireLoad(event, LoadEventListener.GET); + success = true; + return event.getResult(); + } + finally { + afterOperation(success); + } + } + + /** + * Load the data for the object with the specified id into a newly created object. + * This is only called when lazily initializing a proxy. + * Do NOT return a proxy. + */ + public Object immediateLoad(String entityName, Serializable id) throws HibernateException { + if ( log.isDebugEnabled() ) { + EntityPersister persister = getFactory().getEntityPersister(entityName); + log.debug( "initializing proxy: " + MessageHelper.infoString( persister, id, getFactory() ) ); + } + + LoadEvent event = new LoadEvent(id, entityName, true, this); + fireLoad(event, LoadEventListener.IMMEDIATE_LOAD); + return event.getResult(); + } + + public Object internalLoad(String entityName, Serializable id, boolean eager, boolean nullable) throws HibernateException { + // todo : remove + LoadEventListener.LoadType type = nullable ? + LoadEventListener.INTERNAL_LOAD_NULLABLE : + eager ? LoadEventListener.INTERNAL_LOAD_EAGER : LoadEventListener.INTERNAL_LOAD_LAZY; + LoadEvent event = new LoadEvent(id, entityName, true, this); + fireLoad(event, type); + if ( !nullable ) { + UnresolvableObjectException.throwIfNull( event.getResult(), id, entityName ); + } + return event.getResult(); + } + + public Object load(Class entityClass, Serializable id, LockMode lockMode) throws HibernateException { + return load( entityClass.getName(), id, lockMode ); + } + + public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException { + LoadEvent event = new LoadEvent(id, entityName, lockMode, this); + fireLoad( event, LoadEventListener.LOAD ); + return event.getResult(); + } + + public Object get(Class entityClass, Serializable id, LockMode lockMode) throws HibernateException { + return get( entityClass.getName(), id, lockMode ); + } + + public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException { + LoadEvent event = new LoadEvent(id, entityName, lockMode, this); + fireLoad(event, LoadEventListener.GET); + return event.getResult(); + } + + private void fireLoad(LoadEvent event, LoadType loadType) { + errorIfClosed(); + checkTransactionSynchStatus(); + LoadEventListener[] loadEventListener = listeners.getLoadEventListeners(); + for ( int i = 0; i < loadEventListener.length; i++ ) { + loadEventListener[i].onLoad(event, loadType); + } + } + + + // refresh() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void refresh(Object object) throws HibernateException { + fireRefresh( new RefreshEvent(object, this) ); + } + + public void refresh(Object object, LockMode lockMode) throws HibernateException { + fireRefresh( new RefreshEvent(object, lockMode, this) ); + } + + public void refresh(Object object, Map refreshedAlready) throws HibernateException { + fireRefresh( 
refreshedAlready, new RefreshEvent(object, this) ); + } + + private void fireRefresh(RefreshEvent refreshEvent) { + errorIfClosed(); + checkTransactionSynchStatus(); + RefreshEventListener[] refreshEventListener = listeners.getRefreshEventListeners(); + for ( int i = 0; i < refreshEventListener.length; i++ ) { + refreshEventListener[i].onRefresh( refreshEvent ); + } + } + + private void fireRefresh(Map refreshedAlready, RefreshEvent refreshEvent) { + errorIfClosed(); + checkTransactionSynchStatus(); + RefreshEventListener[] refreshEventListener = listeners.getRefreshEventListeners(); + for ( int i = 0; i < refreshEventListener.length; i++ ) { + refreshEventListener[i].onRefresh( refreshEvent, refreshedAlready ); + } + } + + + // replicate() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void replicate(Object obj, ReplicationMode replicationMode) throws HibernateException { + fireReplicate( new ReplicateEvent(obj, replicationMode, this) ); + } + + public void replicate(String entityName, Object obj, ReplicationMode replicationMode) + throws HibernateException { + fireReplicate( new ReplicateEvent(entityName, obj, replicationMode, this) ); + } + + private void fireReplicate(ReplicateEvent event) { + errorIfClosed(); + checkTransactionSynchStatus(); + ReplicateEventListener[] replicateEventListener = listeners.getReplicateEventListeners(); + for ( int i = 0; i < replicateEventListener.length; i++ ) { + replicateEventListener[i].onReplicate(event); + } + } + + + // evict() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * remove any hard references to the entity that are held by the infrastructure + * (references held by application or other persistant instances are okay) + */ + public void evict(Object object) throws HibernateException { + fireEvict( new EvictEvent(object, this) ); + } + + private void fireEvict(EvictEvent evictEvent) { + errorIfClosed(); + checkTransactionSynchStatus(); + EvictEventListener[] evictEventListener = listeners.getEvictEventListeners(); + for ( int i = 0; i < evictEventListener.length; i++ ) { + evictEventListener[i].onEvict( evictEvent ); + } + } + + /** + * detect in-memory changes, determine if the changes are to tables + * named in the query and, if so, complete execution the flush + */ + protected boolean autoFlushIfRequired(Set querySpaces) throws HibernateException { + errorIfClosed(); + if ( ! 
isTransactionInProgress() ) { + // do not auto-flush while outside a transaction + return false; + } + AutoFlushEvent event = new AutoFlushEvent(querySpaces, this); + AutoFlushEventListener[] autoFlushEventListener = listeners.getAutoFlushEventListeners(); + for ( int i = 0; i < autoFlushEventListener.length; i++ ) { + autoFlushEventListener[i].onAutoFlush(event); + } + return event.isFlushRequired(); + } + + public boolean isDirty() throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + log.debug("checking session dirtiness"); + if ( actionQueue.areInsertionsOrDeletionsQueued() ) { + log.debug("session dirty (scheduled updates and insertions)"); + return true; + } + else { + DirtyCheckEvent event = new DirtyCheckEvent(this); + DirtyCheckEventListener[] dirtyCheckEventListener = listeners.getDirtyCheckEventListeners(); + for ( int i = 0; i < dirtyCheckEventListener.length; i++ ) { + dirtyCheckEventListener[i].onDirtyCheck(event); + } + return event.isDirty(); + } + } + + public void flush() throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + if ( persistenceContext.getCascadeLevel() > 0 ) { + throw new HibernateException("Flush during cascade is dangerous"); + } + FlushEventListener[] flushEventListener = listeners.getFlushEventListeners(); + for ( int i = 0; i < flushEventListener.length; i++ ) { + flushEventListener[i].onFlush( new FlushEvent(this) ); + } + } + + public void forceFlush(EntityEntry entityEntry) throws HibernateException { + errorIfClosed(); + if ( log.isDebugEnabled() ) { + log.debug( + "flushing to force deletion of re-saved object: " + + MessageHelper.infoString( entityEntry.getPersister(), entityEntry.getId(), getFactory() ) + ); + } + + if ( persistenceContext.getCascadeLevel() > 0 ) { + throw new ObjectDeletedException( + "deleted object would be re-saved by cascade (remove deleted object from associations)", + entityEntry.getId(), + entityEntry.getPersister().getEntityName() + ); + } + + flush(); + } + + public Filter getEnabledFilter(String filterName) { + checkTransactionSynchStatus(); + return (Filter) enabledFilters.get(filterName); + } + + public Filter enableFilter(String filterName) { + errorIfClosed(); + checkTransactionSynchStatus(); + FilterImpl filter = new FilterImpl( factory.getFilterDefinition(filterName) ); + enabledFilters.put(filterName, filter); + return filter; + } + + public void disableFilter(String filterName) { + errorIfClosed(); + checkTransactionSynchStatus(); + enabledFilters.remove(filterName); + } + + public Object getFilterParameterValue(String filterParameterName) { + errorIfClosed(); + checkTransactionSynchStatus(); + String[] parsed = parseFilterParameterName(filterParameterName); + FilterImpl filter = (FilterImpl) enabledFilters.get( parsed[0] ); + if (filter == null) { + throw new IllegalArgumentException("Filter [" + parsed[0] + "] currently not enabled"); + } + return filter.getParameter( parsed[1] ); + } + + public Type getFilterParameterType(String filterParameterName) { + errorIfClosed(); + checkTransactionSynchStatus(); + String[] parsed = parseFilterParameterName(filterParameterName); + FilterDefinition filterDef = factory.getFilterDefinition( parsed[0] ); + if (filterDef == null) { + throw new IllegalArgumentException("Filter [" + parsed[0] + "] not defined"); + } + Type type = filterDef.getParameterType( parsed[1] ); + if (type == null) { + // this is an internal error of some sort... 
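+ // (the filter definition itself was found above, so a null type here means
+ // the supplied "filterName.parameterName" string names a parameter the
+ // definition does not declare)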
+ throw new InternalError("Unable to locate type for filter parameter"); + } + return type; + } + + public Map getEnabledFilters() { + errorIfClosed(); + checkTransactionSynchStatus(); + // First, validate all the enabled filters... + //TODO: this implementation has bad performance + Iterator itr = enabledFilters.values().iterator(); + while ( itr.hasNext() ) { + final Filter filter = (Filter) itr.next(); + filter.validate(); + } + return enabledFilters; + } + + private String[] parseFilterParameterName(String filterParameterName) { + int dot = filterParameterName.indexOf('.'); + if (dot <= 0) { + throw new IllegalArgumentException("Invalid filter-parameter name format"); // TODO: what type? + } + String filterName = filterParameterName.substring(0, dot); + String parameterName = filterParameterName.substring(dot+1); + return new String[] {filterName, parameterName}; + } + + + /** + * Retrieve a list of persistent objects using a hibernate query + */ + public List find(String query) throws HibernateException { + return list( query, new QueryParameters() ); + } + + public List find(String query, Object value, Type type) throws HibernateException { + return list( query, new QueryParameters(type, value) ); + } + + public List find(String query, Object[] values, Type[] types) throws HibernateException { + return list( query, new QueryParameters(types, values) ); + } + + public List list(String query, QueryParameters queryParameters) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + queryParameters.validateParameters(); + HQLQueryPlan plan = getHQLQueryPlan( query, false ); + autoFlushIfRequired( plan.getQuerySpaces() ); + + List results = CollectionHelper.EMPTY_LIST; + boolean success = false; + + dontFlushFromFind++; //stops flush being called multiple times if this method is recursively called + try { + results = plan.performList( queryParameters, this ); + success = true; + } + finally { + dontFlushFromFind--; + afterOperation(success); + } + return results; + } + + public int executeUpdate(String query, QueryParameters queryParameters) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + queryParameters.validateParameters(); + HQLQueryPlan plan = getHQLQueryPlan( query, false ); + autoFlushIfRequired( plan.getQuerySpaces() ); + + boolean success = false; + int result = 0; + try { + result = plan.performExecuteUpdate( queryParameters, this ); + success = true; + } + finally { + afterOperation(success); + } + return result; + } + + public int executeNativeUpdate(NativeSQLQuerySpecification nativeQuerySpecification, + QueryParameters queryParameters) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + queryParameters.validateParameters(); + NativeSQLQueryPlan plan = getNativeSQLQueryPlan(nativeQuerySpecification); + + + autoFlushIfRequired( plan.getCustomQuery().getQuerySpaces() ); + + boolean success = false; + int result = 0; + try { + result = plan.performExecuteUpdate(queryParameters, this); + success = true; + } finally { + afterOperation(success); + } + return result; + } + + public Iterator iterate(String query) throws HibernateException { + return iterate( query, new QueryParameters() ); + } + + public Iterator iterate(String query, Object value, Type type) throws HibernateException { + return iterate( query, new QueryParameters(type, value) ); + } + + public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException { + return iterate( query, new 
QueryParameters(types, values) ); + } + + public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + queryParameters.validateParameters(); + HQLQueryPlan plan = getHQLQueryPlan( query, true ); + autoFlushIfRequired( plan.getQuerySpaces() ); + + dontFlushFromFind++; //stops flush being called multiple times if this method is recursively called + try { + return plan.performIterate( queryParameters, this ); + } + finally { + dontFlushFromFind--; + } + } + + public ScrollableResults scroll(String query, QueryParameters queryParameters) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + HQLQueryPlan plan = getHQLQueryPlan( query, false ); + autoFlushIfRequired( plan.getQuerySpaces() ); + dontFlushFromFind++; + try { + return plan.performScroll( queryParameters, this ); + } + finally { + dontFlushFromFind--; + } + } + + public int delete(String query) throws HibernateException { + return delete( query, ArrayHelper.EMPTY_OBJECT_ARRAY, ArrayHelper.EMPTY_TYPE_ARRAY ); + } + + public int delete(String query, Object value, Type type) throws HibernateException { + return delete( query, new Object[]{value}, new Type[]{type} ); + } + + public int delete(String query, Object[] values, Type[] types) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + if ( query == null ) { + throw new IllegalArgumentException("attempt to perform delete-by-query with null query"); + } + + if ( log.isTraceEnabled() ) { + log.trace( "delete: " + query ); + if ( values.length != 0 ) { + log.trace( "parameters: " + StringHelper.toString( values ) ); + } + } + + List list = find( query, values, types ); + int deletionCount = list.size(); + for ( int i = 0; i < deletionCount; i++ ) { + delete( list.get( i ) ); + } + + return deletionCount; + } + + public Query createFilter(Object collection, String queryString) { + errorIfClosed(); + checkTransactionSynchStatus(); + CollectionFilterImpl filter = new CollectionFilterImpl( + queryString, + collection, + this, + getFilterQueryPlan( collection, queryString, null, false ).getParameterMetadata() + ); + filter.setComment( queryString ); + return filter; + } + + public Query getNamedQuery(String queryName) throws MappingException { + errorIfClosed(); + checkTransactionSynchStatus(); + return super.getNamedQuery(queryName); + } + + public Object instantiate(String entityName, Serializable id) throws HibernateException { + return instantiate( factory.getEntityPersister(entityName), id ); + } + + /** + * give the interceptor an opportunity to override the default instantiation + */ + public Object instantiate(EntityPersister persister, Serializable id) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + Object result = interceptor.instantiate( persister.getEntityName(), entityMode, id ); + if ( result == null ) { + result = persister.instantiate( id, entityMode ); + } + return result; + } + + public EntityMode getEntityMode() { + checkTransactionSynchStatus(); + return entityMode; + } + + public void setFlushMode(FlushMode flushMode) { + errorIfClosed(); + checkTransactionSynchStatus(); + if ( log.isTraceEnabled() ) { + log.trace("setting flush mode to: " + flushMode); + } + this.flushMode = flushMode; + } + + public FlushMode getFlushMode() { + checkTransactionSynchStatus(); + return flushMode; + } + + public CacheMode getCacheMode() { + checkTransactionSynchStatus(); + return cacheMode; + } + + public void 
setCacheMode(CacheMode cacheMode) { + errorIfClosed(); + checkTransactionSynchStatus(); + if ( log.isTraceEnabled() ) { + log.trace("setting cache mode to: " + cacheMode); + } + this.cacheMode= cacheMode; + } + + public Transaction getTransaction() throws HibernateException { + errorIfClosed(); + return jdbcContext.getTransaction(); + } + + public Transaction beginTransaction() throws HibernateException { + errorIfClosed(); + if ( rootSession != null ) { + // todo : should seriously consider not allowing a txn to begin from a child session + // can always route the request to the root session... + log.warn( "Transaction started on non-root session" ); + } + Transaction result = getTransaction(); + result.begin(); + return result; + } + + public void afterTransactionBegin(Transaction tx) { + errorIfClosed(); + interceptor.afterTransactionBegin(tx); + } + + public EntityPersister getEntityPersister(final String entityName, final Object object) { + errorIfClosed(); + if (entityName==null) { + return factory.getEntityPersister( guessEntityName( object ) ); + } + else { + // try block is a hack around fact that currently tuplizers are not + // given the opportunity to resolve a subclass entity name. this + // allows the (we assume custom) interceptor the ability to + // influence this decision if we were not able to based on the + // given entityName + try { + return factory.getEntityPersister( entityName ) + .getSubclassEntityPersister( object, getFactory(), entityMode ); + } + catch( HibernateException e ) { + try { + return getEntityPersister( null, object ); + } + catch( HibernateException e2 ) { + throw e; + } + } + } + } + + // not for internal use: + public Serializable getIdentifier(Object object) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + if ( object instanceof HibernateProxy ) { + LazyInitializer li = ( (HibernateProxy) object ).getHibernateLazyInitializer(); + if ( li.getSession() != this ) { + throw new TransientObjectException( "The proxy was not associated with this session" ); + } + return li.getIdentifier(); + } + else { + EntityEntry entry = persistenceContext.getEntry(object); + if ( entry == null ) { + throw new TransientObjectException( "The instance was not associated with this session" ); + } + return entry.getId(); + } + } + + /** + * Get the id value for an object that is actually associated with the session. This + * is a bit stricter than getEntityIdentifierIfNotUnsaved(). + */ + public Serializable getContextEntityIdentifier(Object object) { + errorIfClosed(); + if ( object instanceof HibernateProxy ) { + return getProxyIdentifier(object); + } + else { + EntityEntry entry = persistenceContext.getEntry(object); + return entry != null ? 
entry.getId() : null; + } + } + + private Serializable getProxyIdentifier(Object proxy) { + return ( (HibernateProxy) proxy ).getHibernateLazyInitializer().getIdentifier(); + } + + public Collection filter(Object collection, String filter) throws HibernateException { + return listFilter( collection, filter, new QueryParameters( new Type[1], new Object[1] ) ); + } + + public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException { + return listFilter( collection, filter, new QueryParameters( new Type[]{null, type}, new Object[]{null, value} ) ); + } + + public Collection filter(Object collection, String filter, Object[] values, Type[] types) + throws HibernateException { + Object[] vals = new Object[values.length + 1]; + Type[] typs = new Type[types.length + 1]; + System.arraycopy( values, 0, vals, 1, values.length ); + System.arraycopy( types, 0, typs, 1, types.length ); + return listFilter( collection, filter, new QueryParameters( typs, vals ) ); + } + + private FilterQueryPlan getFilterQueryPlan( + Object collection, + String filter, + QueryParameters parameters, + boolean shallow) throws HibernateException { + if ( collection == null ) { + throw new NullPointerException( "null collection passed to filter" ); + } + + CollectionEntry entry = persistenceContext.getCollectionEntryOrNull( collection ); + final CollectionPersister roleBeforeFlush = (entry == null) ? null : entry.getLoadedPersister(); + + FilterQueryPlan plan = null; + if ( roleBeforeFlush == null ) { + // if it was previously unreferenced, we need to flush in order to + // get its state into the database in order to execute query + flush(); + entry = persistenceContext.getCollectionEntryOrNull( collection ); + CollectionPersister roleAfterFlush = (entry == null) ? null : entry.getLoadedPersister(); + if ( roleAfterFlush == null ) { + throw new QueryException( "The collection was unreferenced" ); + } + plan = factory.getQueryPlanCache().getFilterQueryPlan( filter, roleAfterFlush.getRole(), shallow, getEnabledFilters() ); + } + else { + // otherwise, we only need to flush if there are in-memory changes + // to the queried tables + plan = factory.getQueryPlanCache().getFilterQueryPlan( filter, roleBeforeFlush.getRole(), shallow, getEnabledFilters() ); + if ( autoFlushIfRequired( plan.getQuerySpaces() ) ) { + // might need to run a different filter entirely after the flush + // because the collection role may have changed + entry = persistenceContext.getCollectionEntryOrNull( collection ); + CollectionPersister roleAfterFlush = (entry == null) ? 
null : entry.getLoadedPersister(); + if ( roleBeforeFlush != roleAfterFlush ) { + if ( roleAfterFlush == null ) { + throw new QueryException( "The collection was dereferenced" ); + } + plan = factory.getQueryPlanCache().getFilterQueryPlan( filter, roleAfterFlush.getRole(), shallow, getEnabledFilters() ); + } + } + } + + if ( parameters != null ) { + parameters.getPositionalParameterValues()[0] = entry.getLoadedKey(); + parameters.getPositionalParameterTypes()[0] = entry.getLoadedPersister().getKeyType(); + } + + return plan; + } + + public List listFilter(Object collection, String filter, QueryParameters queryParameters) + throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + FilterQueryPlan plan = getFilterQueryPlan( collection, filter, queryParameters, false ); + List results = CollectionHelper.EMPTY_LIST; + + boolean success = false; + dontFlushFromFind++; //stops flush being called multiple times if this method is recursively called + try { + results = plan.performList( queryParameters, this ); + success = true; + } + finally { + dontFlushFromFind--; + afterOperation(success); + } + return results; + } + + public Iterator iterateFilter(Object collection, String filter, QueryParameters queryParameters) + throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + FilterQueryPlan plan = getFilterQueryPlan( collection, filter, queryParameters, true ); + return plan.performIterate( queryParameters, this ); + } + + public Criteria createCriteria(Class persistentClass, String alias) { + errorIfClosed(); + checkTransactionSynchStatus(); + return new CriteriaImpl( persistentClass.getName(), alias, this ); + } + + public Criteria createCriteria(String entityName, String alias) { + errorIfClosed(); + checkTransactionSynchStatus(); + return new CriteriaImpl(entityName, alias, this); + } + + public Criteria createCriteria(Class persistentClass) { + errorIfClosed(); + checkTransactionSynchStatus(); + return new CriteriaImpl( persistentClass.getName(), this ); + } + + public Criteria createCriteria(String entityName) { + errorIfClosed(); + checkTransactionSynchStatus(); + return new CriteriaImpl(entityName, this); + } + + public ScrollableResults scroll(CriteriaImpl criteria, ScrollMode scrollMode) { + errorIfClosed(); + checkTransactionSynchStatus(); + String entityName = criteria.getEntityOrClassName(); + CriteriaLoader loader = new CriteriaLoader( + getOuterJoinLoadable(entityName), + factory, + criteria, + entityName, + getEnabledFilters() + ); + autoFlushIfRequired( loader.getQuerySpaces() ); + dontFlushFromFind++; + try { + return loader.scroll(this, scrollMode); + } + finally { + dontFlushFromFind--; + } + } + + public List list(CriteriaImpl criteria) throws HibernateException { + errorIfClosed(); + checkTransactionSynchStatus(); + String[] implementors = factory.getImplementors( criteria.getEntityOrClassName() ); + int size = implementors.length; + + CriteriaLoader[] loaders = new CriteriaLoader[size]; + Set spaces = new HashSet(); + for( int i=0; i entityName, but that would mean nodeName being distinct + entity = ( (Element) object ).getName(); + } + else { + entity = object.getClass().getName(); + } + } + return entity; + } + + public void cancelQuery() throws HibernateException { + errorIfClosed(); + getBatcher().cancelLastQuery(); + } + + public Interceptor getInterceptor() { + checkTransactionSynchStatus(); + return interceptor; + } + + public int getDontFlushFromFind() { + return dontFlushFromFind; + } + + public String toString() { + 
StringBuffer buf = new StringBuffer(500) + .append( "SessionImpl(" ); + if ( !isClosed() ) { + buf.append(persistenceContext) + .append(";") + .append(actionQueue); + } + else { + buf.append("<closed>"); + } + return buf.append(')').toString(); + } + + public EventListeners getListeners() { + return listeners; + } + + public ActionQueue getActionQueue() { + errorIfClosed(); + checkTransactionSynchStatus(); + return actionQueue; + } + + public PersistenceContext getPersistenceContext() { + errorIfClosed(); + checkTransactionSynchStatus(); + return persistenceContext; + } + + public SessionStatistics getStatistics() { + checkTransactionSynchStatus(); + return new SessionStatisticsImpl(this); + } + + public boolean isEventSource() { + checkTransactionSynchStatus(); + return true; + } + + public void setReadOnly(Object entity, boolean readOnly) { + errorIfClosed(); + checkTransactionSynchStatus(); + persistenceContext.setReadOnly(entity, readOnly); + } + + public void afterScrollOperation() { + // nothing to do in a stateful session + } + + public String getFetchProfile() { + checkTransactionSynchStatus(); + return fetchProfile; + } + + public JDBCContext getJDBCContext() { + errorIfClosed(); + checkTransactionSynchStatus(); + return jdbcContext; + } + + public void setFetchProfile(String fetchProfile) { + errorIfClosed(); + checkTransactionSynchStatus(); + this.fetchProfile = fetchProfile; + } + + private void checkTransactionSynchStatus() { + if ( jdbcContext != null && !isClosed() ) { + jdbcContext.registerSynchronizationIfPossible(); + } + } + + /** + * Used by JDK serialization... + * + * @param ois The input stream from which we are being read... + * @throws IOException Indicates a general IO stream exception + * @throws ClassNotFoundException Indicates a class resolution issue + */ + private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException { + log.trace( "deserializing session" ); + + boolean isRootSession = ois.readBoolean(); + connectionReleaseMode = ConnectionReleaseMode.parse( ( String ) ois.readObject() ); + entityMode = EntityMode.parse( ( String ) ois.readObject() ); + autoClear = ois.readBoolean(); + flushMode = FlushMode.parse( ( String ) ois.readObject() ); + cacheMode = CacheMode.parse( ( String ) ois.readObject() ); + flushBeforeCompletionEnabled = ois.readBoolean(); + autoCloseSessionEnabled = ois.readBoolean(); + fetchProfile = ( String ) ois.readObject(); + interceptor = ( Interceptor ) ois.readObject(); + + factory = SessionFactoryImpl.deserialize( ois ); + listeners = factory.getEventListeners(); + + if ( isRootSession ) { + jdbcContext = JDBCContext.deserialize( ois, this, interceptor ); + } + + persistenceContext = StatefulPersistenceContext.deserialize( ois, this ); + actionQueue = ActionQueue.deserialize( ois, this ); + + enabledFilters = ( Map ) ois.readObject(); + childSessionsByEntityMode = ( Map ) ois.readObject(); + + Iterator iter = enabledFilters.values().iterator(); + while ( iter.hasNext() ) { + ( ( FilterImpl ) iter.next() ).afterDeserialize(factory); + } + + if ( isRootSession && childSessionsByEntityMode != null ) { + iter = childSessionsByEntityMode.values().iterator(); + while ( iter.hasNext() ) { + final SessionImpl child = ( ( SessionImpl ) iter.next() ); + child.rootSession = this; + child.jdbcContext = this.jdbcContext; + } + } + } + + /** + * Used by JDK serialization... + * + * @param oos The output stream to which we are being written...
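+ * (the JDBC connection must already have been released; the guard below rejects a connected session)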
+ * @throws IOException Indicates a general IO stream exception + */ + private void writeObject(ObjectOutputStream oos) throws IOException { + if ( !jdbcContext.getConnectionManager().isReadyForSerialization() ) { + throw new IllegalStateException( "Cannot serialize a session while connected" ); + } + + log.trace( "serializing session" ); + + oos.writeBoolean( rootSession == null ); + oos.writeObject( connectionReleaseMode.toString() ); + oos.writeObject( entityMode.toString() ); + oos.writeBoolean( autoClear ); + oos.writeObject( flushMode.toString() ); + oos.writeObject( cacheMode.toString() ); + oos.writeBoolean( flushBeforeCompletionEnabled ); + oos.writeBoolean( autoCloseSessionEnabled ); + oos.writeObject( fetchProfile ); + // we need to writeObject() on this since interceptor is user defined + oos.writeObject( interceptor ); + + factory.serialize( oos ); + + if ( rootSession == null ) { + jdbcContext.serialize( oos ); + } + + persistenceContext.serialize( oos ); + actionQueue.serialize( oos ); + + // todo : look at optimizing these... + oos.writeObject( enabledFilters ); + oos.writeObject( childSessionsByEntityMode ); + } +} diff --git a/src/org/hibernate/impl/StatelessSessionImpl.java b/src/org/hibernate/impl/StatelessSessionImpl.java new file mode 100755 index 0000000000..ec7764e7ec --- /dev/null +++ b/src/org/hibernate/impl/StatelessSessionImpl.java @@ -0,0 +1,630 @@ +//$Id$ +package org.hibernate.impl; + +import java.io.Serializable; +import java.sql.Connection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.CacheMode; +import org.hibernate.ConnectionReleaseMode; +import org.hibernate.Criteria; +import org.hibernate.EmptyInterceptor; +import org.hibernate.EntityMode; +import org.hibernate.FlushMode; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.LockMode; +import org.hibernate.MappingException; +import org.hibernate.ScrollMode; +import org.hibernate.ScrollableResults; +import org.hibernate.SessionException; +import org.hibernate.StatelessSession; +import org.hibernate.Transaction; +import org.hibernate.UnresolvableObjectException; +import org.hibernate.cache.CacheKey; +import org.hibernate.collection.PersistentCollection; +import org.hibernate.engine.EntityKey; +import org.hibernate.engine.PersistenceContext; +import org.hibernate.engine.QueryParameters; +import org.hibernate.engine.StatefulPersistenceContext; +import org.hibernate.engine.Versioning; +import org.hibernate.engine.query.HQLQueryPlan; +import org.hibernate.engine.query.NativeSQLQueryPlan; +import org.hibernate.engine.query.sql.NativeSQLQuerySpecification; +import org.hibernate.event.EventListeners; +import org.hibernate.id.IdentifierGeneratorFactory; +import org.hibernate.jdbc.Batcher; +import org.hibernate.jdbc.JDBCContext; +import org.hibernate.loader.criteria.CriteriaLoader; +import org.hibernate.loader.custom.CustomLoader; +import org.hibernate.loader.custom.CustomQuery; +import org.hibernate.persister.entity.EntityPersister; +import org.hibernate.persister.entity.OuterJoinLoadable; +import org.hibernate.pretty.MessageHelper; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.type.Type; +import org.hibernate.util.CollectionHelper; + +/** + * @author Gavin King + */ +public class StatelessSessionImpl extends AbstractSessionImpl + implements 
JDBCContext.Context, StatelessSession { + + private static final Log log = LogFactory.getLog( StatelessSessionImpl.class ); + + private JDBCContext jdbcContext; + private PersistenceContext temporaryPersistenceContext = new StatefulPersistenceContext( this ); + + StatelessSessionImpl(Connection connection, SessionFactoryImpl factory) { + super( factory ); + this.jdbcContext = new JDBCContext( this, connection, EmptyInterceptor.INSTANCE ); + } + + + // inserts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Serializable insert(Object entity) { + errorIfClosed(); + return insert(null, entity); + } + + public Serializable insert(String entityName, Object entity) { + errorIfClosed(); + EntityPersister persister = getEntityPersister(entityName, entity); + Serializable id = persister.getIdentifierGenerator().generate(this, entity); + Object[] state = persister.getPropertyValues(entity, EntityMode.POJO); + if ( persister.isVersioned() ) { + boolean substitute = Versioning.seedVersion(state, persister.getVersionProperty(), persister.getVersionType(), this); + if ( substitute ) { + persister.setPropertyValues( entity, state, EntityMode.POJO ); + } + } + if ( id == IdentifierGeneratorFactory.POST_INSERT_INDICATOR ) { + id = persister.insert(state, entity, this); + } + else { + persister.insert(id, state, entity, this); + } + persister.setIdentifier(entity, id, EntityMode.POJO); + return id; + } + + + // deletes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void delete(Object entity) { + errorIfClosed(); + delete(null, entity); + } + + public void delete(String entityName, Object entity) { + errorIfClosed(); + EntityPersister persister = getEntityPersister(entityName, entity); + Serializable id = persister.getIdentifier(entity, EntityMode.POJO); + Object version = persister.getVersion(entity, EntityMode.POJO); + persister.delete(id, version, entity, this); + } + + + // updates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void update(Object entity) { + errorIfClosed(); + update(null, entity); + } + + public void update(String entityName, Object entity) { + errorIfClosed(); + EntityPersister persister = getEntityPersister(entityName, entity); + Serializable id = persister.getIdentifier(entity, EntityMode.POJO); + Object[] state = persister.getPropertyValues(entity, EntityMode.POJO); + Object oldVersion; + if ( persister.isVersioned() ) { + oldVersion = persister.getVersion(entity, EntityMode.POJO); + Object newVersion = Versioning.increment( oldVersion, persister.getVersionType(), this ); + Versioning.setVersion(state, newVersion, persister); + persister.setPropertyValues(entity, state, EntityMode.POJO); + } + else { + oldVersion = null; + } + persister.update(id, state, null, false, null, oldVersion, entity, null, this); + } + + + // loading ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public Object get(Class entityClass, Serializable id) { + return get( entityClass.getName(), id ); + } + + public Object get(Class entityClass, Serializable id, LockMode lockMode) { + return get( entityClass.getName(), id, lockMode ); + } + + public Object get(String entityName, Serializable id) { + return get(entityName, id, LockMode.NONE); + } + + public Object get(String entityName, Serializable id, LockMode lockMode) { + errorIfClosed(); + Object result = getFactory().getEntityPersister(entityName) + .load(id, null, lockMode, this); + temporaryPersistenceContext.clear(); + return result; + } + + public 
void refresh(Object entity) { + refresh( bestGuessEntityName( entity ), entity, LockMode.NONE ); + } + + public void refresh(String entityName, Object entity) { + refresh( entityName, entity, LockMode.NONE ); + } + + public void refresh(Object entity, LockMode lockMode) { + refresh( bestGuessEntityName( entity ), entity, lockMode ); + } + + public void refresh(String entityName, Object entity, LockMode lockMode) { + final EntityPersister persister = this.getEntityPersister( entityName, entity ); + final Serializable id = persister.getIdentifier( entity, getEntityMode() ); + if ( log.isTraceEnabled() ) { + log.trace( + "refreshing transient " + + MessageHelper.infoString( persister, id, this.getFactory() ) + ); + } + // TODO : can this ever happen??? +// EntityKey key = new EntityKey( id, persister, source.getEntityMode() ); +// if ( source.getPersistenceContext().getEntry( key ) != null ) { +// throw new PersistentObjectException( +// "attempted to refresh transient instance when persistent " + +// "instance was already associated with the Session: " + +// MessageHelper.infoString( persister, id, source.getFactory() ) +// ); +// } + + if ( persister.hasCache() ) { + final CacheKey ck = new CacheKey( + id, + persister.getIdentifierType(), + persister.getRootEntityName(), + this.getEntityMode(), + this.getFactory() + ); + persister.getCache().remove(ck); + } + + String previousFetchProfile = this.getFetchProfile(); + Object result = null; + try { + this.setFetchProfile( "refresh" ); + result = persister.load( id, entity, lockMode, this ); + } + finally { + this.setFetchProfile( previousFetchProfile ); + } + UnresolvableObjectException.throwIfNull( result, id, persister.getEntityName() ); + } + + public Object immediateLoad(String entityName, Serializable id) + throws HibernateException { + throw new SessionException("proxies cannot be fetched by a stateless session"); + } + + public void initializeCollection( + PersistentCollection collection, + boolean writing) throws HibernateException { + throw new SessionException("collections cannot be fetched by a stateless session"); + } + + public Object instantiate( + String entityName, + Serializable id) throws HibernateException { + errorIfClosed(); + return getFactory().getEntityPersister( entityName ) + .instantiate( id, EntityMode.POJO ); + } + + public Object internalLoad( + String entityName, + Serializable id, + boolean eager, + boolean nullable) throws HibernateException { + errorIfClosed(); + EntityPersister persister = getFactory().getEntityPersister(entityName); + if ( !eager && persister.hasProxy() ) { + return persister.createProxy(id, this); + } + Object loaded = temporaryPersistenceContext.getEntity( new EntityKey(id, persister, EntityMode.POJO) ); + //TODO: if not loaded, throw an exception + return loaded==null ? 
get( entityName, id ) : loaded; + } + + public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException { + throw new UnsupportedOperationException(); + } + + public Iterator iterateFilter(Object collection, String filter, QueryParameters queryParameters) + throws HibernateException { + throw new UnsupportedOperationException(); + } + + public List listFilter(Object collection, String filter, QueryParameters queryParameters) + throws HibernateException { + throw new UnsupportedOperationException(); + } + + + public boolean isOpen() { + return !isClosed(); + } + + public void close() { + managedClose(); + } + + public ConnectionReleaseMode getConnectionReleaseMode() { + return factory.getSettings().getConnectionReleaseMode(); + } + + public boolean isAutoCloseSessionEnabled() { + return factory.getSettings().isAutoCloseSessionEnabled(); + } + + public boolean isFlushBeforeCompletionEnabled() { + return true; + } + + public boolean isFlushModeNever() { + return false; + } + + public void managedClose() { + if ( isClosed() ) { + throw new SessionException( "Session was already closed!" ); + } + jdbcContext.getConnectionManager().close(); + setClosed(); + } + + public void managedFlush() { + errorIfClosed(); + getBatcher().executeBatch(); + } + + public boolean shouldAutoClose() { + return isAutoCloseSessionEnabled() && !isClosed(); + } + + public void afterTransactionCompletion(boolean successful, Transaction tx) {} + + public void beforeTransactionCompletion(Transaction tx) {} + + public String bestGuessEntityName(Object object) { + if (object instanceof HibernateProxy) { + object = ( (HibernateProxy) object ).getHibernateLazyInitializer().getImplementation(); + } + return guessEntityName(object); + } + + public Connection connection() { + errorIfClosed(); + return jdbcContext.borrowConnection(); + } + + public int executeUpdate(String query, QueryParameters queryParameters) + throws HibernateException { + errorIfClosed(); + queryParameters.validateParameters(); + HQLQueryPlan plan = getHQLQueryPlan( query, false ); + boolean success = false; + int result = 0; + try { + result = plan.performExecuteUpdate( queryParameters, this ); + success = true; + } + finally { + afterOperation(success); + } + temporaryPersistenceContext.clear(); + return result; + } + + public Batcher getBatcher() { + errorIfClosed(); + return jdbcContext.getConnectionManager() + .getBatcher(); + } + + public CacheMode getCacheMode() { + return CacheMode.IGNORE; + } + + public int getDontFlushFromFind() { + return 0; + } + + public Map getEnabledFilters() { + return CollectionHelper.EMPTY_MAP; + } + + public Serializable getContextEntityIdentifier(Object object) { + errorIfClosed(); + return null; + } + + public EntityMode getEntityMode() { + return EntityMode.POJO; + } + + public EntityPersister getEntityPersister(String entityName, Object object) + throws HibernateException { + errorIfClosed(); + if ( entityName==null ) { + return factory.getEntityPersister( guessEntityName( object ) ); + } + else { + return factory.getEntityPersister( entityName ) + .getSubclassEntityPersister( object, getFactory(), EntityMode.POJO ); + } + } + + public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException { + errorIfClosed(); + return null; + } + + public Type getFilterParameterType(String filterParameterName) { + throw new UnsupportedOperationException(); + } + + public Object getFilterParameterValue(String filterParameterName) { + throw new UnsupportedOperationException(); + } + + 
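For illustration, a minimal usage sketch of the stateless API implemented above. This snippet is not part of the patch: the Customer entity, its setActive() property and the sessionFactory variable are assumed, while openStatelessSession(), beginTransaction(), createQuery(), scroll(), update() and close() are operations the StatelessSession contract exposes.

    StatelessSession session = sessionFactory.openStatelessSession();
    Transaction tx = session.beginTransaction();
    ScrollableResults customers = session.createQuery("from Customer")
            .scroll(ScrollMode.FORWARD_ONLY);
    while ( customers.next() ) {
        Customer customer = (Customer) customers.get(0);
        customer.setActive(false);
        // hits the database immediately: no dirty checking, no cascades,
        // and no first-level cache (the temporary persistence context is cleared)
        session.update(customer);
    }
    customers.close();
    tx.commit();
    session.close();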
public FlushMode getFlushMode() { + return FlushMode.COMMIT; + } + + public Interceptor getInterceptor() { + return EmptyInterceptor.INSTANCE; + } + + public EventListeners getListeners() { + throw new UnsupportedOperationException(); + } + + public PersistenceContext getPersistenceContext() { + return temporaryPersistenceContext; + } + + public long getTimestamp() { + throw new UnsupportedOperationException(); + } + + public String guessEntityName(Object entity) throws HibernateException { + errorIfClosed(); + return entity.getClass().getName(); + } + + + public boolean isConnected() { + return jdbcContext.getConnectionManager().isCurrentlyConnected(); + } + + public boolean isTransactionInProgress() { + return jdbcContext.isTransactionInProgress(); + } + + public void setAutoClear(boolean enabled) { + throw new UnsupportedOperationException(); + } + + public void setCacheMode(CacheMode cm) { + throw new UnsupportedOperationException(); + } + + public void setFlushMode(FlushMode fm) { + throw new UnsupportedOperationException(); + } + + public Transaction getTransaction() throws HibernateException { + errorIfClosed(); + return jdbcContext.getTransaction(); + } + + public Transaction beginTransaction() throws HibernateException { + errorIfClosed(); + Transaction result = getTransaction(); + result.begin(); + return result; + } + + public boolean isEventSource() { + return false; + } + +///////////////////////////////////////////////////////////////////////////////////////////////////// + + //TODO: COPY/PASTE FROM SessionImpl, pull up! + + public List list(String query, QueryParameters queryParameters) throws HibernateException { + errorIfClosed(); + queryParameters.validateParameters(); + HQLQueryPlan plan = getHQLQueryPlan( query, false ); + boolean success = false; + List results = CollectionHelper.EMPTY_LIST; + try { + results = plan.performList( queryParameters, this ); + success = true; + } + finally { + afterOperation(success); + } + temporaryPersistenceContext.clear(); + return results; + } + + public void afterOperation(boolean success) { + if ( !jdbcContext.isTransactionInProgress() ) { + jdbcContext.afterNontransactionalQuery(success); + } + } + + public Criteria createCriteria(Class persistentClass, String alias) { + errorIfClosed(); + return new CriteriaImpl( persistentClass.getName(), alias, this ); + } + + public Criteria createCriteria(String entityName, String alias) { + errorIfClosed(); + return new CriteriaImpl(entityName, alias, this); + } + + public Criteria createCriteria(Class persistentClass) { + errorIfClosed(); + return new CriteriaImpl( persistentClass.getName(), this ); + } + + public Criteria createCriteria(String entityName) { + errorIfClosed(); + return new CriteriaImpl(entityName, this); + } + + public ScrollableResults scroll(CriteriaImpl criteria, ScrollMode scrollMode) { + errorIfClosed(); + String entityName = criteria.getEntityOrClassName(); + CriteriaLoader loader = new CriteriaLoader( + getOuterJoinLoadable(entityName), + factory, + criteria, + entityName, + getEnabledFilters() + ); + return loader.scroll(this, scrollMode); + } + + public List list(CriteriaImpl criteria) throws HibernateException { + errorIfClosed(); + String[] implementors = factory.getImplementors( criteria.getEntityOrClassName() ); + int size = implementors.length; + + CriteriaLoader[] loaders = new CriteriaLoader[size]; + for( int i=0; i + + +

    + This package contains implementations of the + central Hibernate APIs, especially the + Hibernate session. + + diff --git a/src/org/hibernate/intercept/AbstractFieldInterceptor.java b/src/org/hibernate/intercept/AbstractFieldInterceptor.java new file mode 100644 index 0000000000..73317e93b6 --- /dev/null +++ b/src/org/hibernate/intercept/AbstractFieldInterceptor.java @@ -0,0 +1,103 @@ +package org.hibernate.intercept; + +import org.hibernate.engine.SessionImplementor; +import org.hibernate.LazyInitializationException; + +import java.util.Set; +import java.io.Serializable; + +/** + * @author Steve Ebersole + */ +public abstract class AbstractFieldInterceptor implements FieldInterceptor, Serializable { + + private transient SessionImplementor session; + private Set uninitializedFields; + private final String entityName; + + private transient boolean initializing; + private boolean dirty; + + protected AbstractFieldInterceptor(SessionImplementor session, Set uninitializedFields, String entityName) { + this.session = session; + this.uninitializedFields = uninitializedFields; + this.entityName = entityName; + } + + + // FieldInterceptor impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public final void setSession(SessionImplementor session) { + this.session = session; + } + + public final boolean isInitialized() { + return uninitializedFields == null || uninitializedFields.size() == 0; + } + + public final boolean isInitialized(String field) { + return uninitializedFields == null || !uninitializedFields.contains( field ); + } + + public final void dirty() { + dirty = true; + } + + public final boolean isDirty() { + return dirty; + } + + public final void clearDirty() { + dirty = false; + } + + + // subclass accesses ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected final Object intercept(Object target, String fieldName, Object value) { + if ( initializing ) { + return value; + } + + if ( uninitializedFields != null && uninitializedFields.contains( fieldName ) ) { + if ( session == null ) { + throw new LazyInitializationException( "entity with lazy properties is not associated with a session" ); + } + else if ( !session.isOpen() || !session.isConnected() ) { + throw new LazyInitializationException( "session is not connected" ); + } + + final Object result; + initializing = true; + try { + result = ( ( LazyPropertyInitializer ) session.getFactory() + .getEntityPersister( entityName ) ) + .initializeLazyProperty( fieldName, target, session ); + } + finally { + initializing = false; + } + uninitializedFields = null; //let's assume that there is only one lazy fetch group, for now! 
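+ // once the set is cleared, isInitialized() reports true and later reads bypass interception entirely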
+ return result; + } + else { + return value; + } + } + + public final SessionImplementor getSession() { + return session; + } + + public final Set getUninitializedFields() { + return uninitializedFields; + } + + public final String getEntityName() { + return entityName; + } + + public final boolean isInitializing() { + return initializing; + } +} diff --git a/src/org/hibernate/intercept/FieldInterceptionHelper.java b/src/org/hibernate/intercept/FieldInterceptionHelper.java new file mode 100644 index 0000000000..b8bded0dcc --- /dev/null +++ b/src/org/hibernate/intercept/FieldInterceptionHelper.java @@ -0,0 +1,93 @@ +package org.hibernate.intercept; + +import org.hibernate.engine.SessionImplementor; +import org.hibernate.intercept.cglib.CGLIBHelper; +import org.hibernate.intercept.javassist.JavassistHelper; + +import java.util.Set; + +/** + * Helper class for dealing with enhanced entity classes. + * + * @author Steve Ebersole + */ +public class FieldInterceptionHelper { + + // VERY IMPORTANT!!!! - This class needs to be free of any static references + // to any CGLIB or Javassist classes. Otherwise, users will always need both + // on their classpaths no matter which (if either) they use. + // + // Another option here would be to remove the Hibernate.isPropertyInitialized() + // method and have the users go through the SessionFactory to get this information. + + private FieldInterceptionHelper() { + } + + public static boolean isInstrumented(Class entityClass) { + Class[] definedInterfaces = entityClass.getInterfaces(); + for ( int i = 0; i < definedInterfaces.length; i++ ) { + if ( "net.sf.cglib.transform.impl.InterceptFieldEnabled".equals( definedInterfaces[i].getName() ) + || "org.hibernate.bytecode.javassist.FieldHandled".equals( definedInterfaces[i].getName() ) ) { + return true; + } + } + return false; + } + + public static boolean isInstrumented(Object entity) { + return entity != null && isInstrumented( entity.getClass() ); + } + + public static FieldInterceptor extractFieldInterceptor(Object entity) { + if ( entity == null ) { + return null; + } + Class[] definedInterfaces = entity.getClass().getInterfaces(); + for ( int i = 0; i < definedInterfaces.length; i++ ) { + if ( "net.sf.cglib.transform.impl.InterceptFieldEnabled".equals( definedInterfaces[i].getName() ) ) { + // we have a CGLIB enhanced entity + return CGLIBHelper.extractFieldInterceptor( entity ); + } + else if ( "org.hibernate.bytecode.javassist.FieldHandled".equals( definedInterfaces[i].getName() ) ) { + // we have a Javassist enhanced entity + return JavassistHelper.extractFieldInterceptor( entity ); + } + } + return null; + } + + public static FieldInterceptor injectFieldInterceptor( + Object entity, + String entityName, + Set uninitializedFieldNames, + SessionImplementor session) { + if ( entity != null ) { + Class[] definedInterfaces = entity.getClass().getInterfaces(); + for ( int i = 0; i < definedInterfaces.length; i++ ) { + if ( "net.sf.cglib.transform.impl.InterceptFieldEnabled".equals( definedInterfaces[i].getName() ) ) { + // we have a CGLIB enhanced entity + return CGLIBHelper.injectFieldInterceptor( entity, entityName, uninitializedFieldNames, session ); + } + else if ( "org.hibernate.bytecode.javassist.FieldHandled".equals( definedInterfaces[i].getName() ) ) { + // we have a Javassist enhanced entity + return JavassistHelper.injectFieldInterceptor( entity, entityName, uninitializedFieldNames, session ); + } + } + } + return null; + } + + public static void clearDirty(Object entity) { + FieldInterceptor 
interceptor = extractFieldInterceptor( entity ); + if ( interceptor != null ) { + interceptor.clearDirty(); + } + } + + public static void markDirty(Object entity) { + FieldInterceptor interceptor = extractFieldInterceptor( entity ); + if ( interceptor != null ) { + interceptor.dirty(); + } + } +} diff --git a/src/org/hibernate/intercept/FieldInterceptor.java b/src/org/hibernate/intercept/FieldInterceptor.java new file mode 100755 index 0000000000..2e76388358 --- /dev/null +++ b/src/org/hibernate/intercept/FieldInterceptor.java @@ -0,0 +1,50 @@ +package org.hibernate.intercept; + +import org.hibernate.engine.SessionImplementor; + +/** + * Contract for field interception handlers. + * + * @author Steve Ebersole + */ +public interface FieldInterceptor { + + /** + * Used to associate the entity to which we are bound to the given session. + * + * @param session The session to which we are now associated. + */ + public void setSession(SessionImplementor session); + + /** + * Is the entity to which we are bound completely initialized? + * + * @return True if the entity is initialized; otherwise false. + */ + public boolean isInitialized(); + + /** + * Is the given field initialized for the entity to which we are bound? + * + * @param field The name of the field to check + * @return True if the given field is initialized; otherwise false. + */ + public boolean isInitialized(String field); + + /** + * Forcefully mark the entity as being dirty. + */ + public void dirty(); + + /** + * Is the entity considered dirty? + * + * @return True if the entity is dirty; otherwise false. + */ + public boolean isDirty(); + + /** + * Clear the internal dirty flag. + */ + public void clearDirty(); +} diff --git a/src/org/hibernate/intercept/LazyPropertyInitializer.java b/src/org/hibernate/intercept/LazyPropertyInitializer.java new file mode 100755 index 0000000000..dd2355f426 --- /dev/null +++ b/src/org/hibernate/intercept/LazyPropertyInitializer.java @@ -0,0 +1,34 @@ +//$Id$ +package org.hibernate.intercept; + +import java.io.Serializable; + +import org.hibernate.HibernateException; +import org.hibernate.engine.SessionImplementor; + +/** + * Contract for controlling how lazy properties get initialized.
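+ * (The entity persister is expected to implement this contract; AbstractFieldInterceptor casts the persister to it when an uninitialized field is first read.)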
+ * + * @author Gavin King + */ +public interface LazyPropertyInitializer { + + /** + * Marker value for uninitialized properties + */ + public static final Serializable UNFETCHED_PROPERTY = new Serializable() { + public String toString() { + return ""; + } + public Object readResolve() { + return UNFETCHED_PROPERTY; + } + }; + + /** + * Initialize the property, and return its new value + */ + public Object initializeLazyProperty(String fieldName, Object entity, SessionImplementor session) + throws HibernateException; + +} diff --git a/src/org/hibernate/intercept/cglib/CGLIBHelper.java b/src/org/hibernate/intercept/cglib/CGLIBHelper.java new file mode 100644 index 0000000000..187142bef6 --- /dev/null +++ b/src/org/hibernate/intercept/cglib/CGLIBHelper.java @@ -0,0 +1,32 @@ +package org.hibernate.intercept.cglib; + +import org.hibernate.intercept.FieldInterceptor; +import org.hibernate.engine.SessionImplementor; +import net.sf.cglib.transform.impl.InterceptFieldEnabled; + +import java.util.Set; + +/** + * @author Steve Ebersole + */ +public class CGLIBHelper { + private CGLIBHelper() { + } + + public static FieldInterceptor extractFieldInterceptor(Object entity) { + return ( FieldInterceptor ) ( ( InterceptFieldEnabled ) entity ).getInterceptFieldCallback(); + } + + public static FieldInterceptor injectFieldInterceptor( + Object entity, + String entityName, + Set uninitializedFieldNames, + SessionImplementor session) { + FieldInterceptorImpl fieldInterceptor = new FieldInterceptorImpl( + session, uninitializedFieldNames, entityName + ); + ( ( InterceptFieldEnabled ) entity ).setInterceptFieldCallback( fieldInterceptor ); + return fieldInterceptor; + + } +} diff --git a/src/org/hibernate/intercept/cglib/FieldInterceptorImpl.java b/src/org/hibernate/intercept/cglib/FieldInterceptorImpl.java new file mode 100644 index 0000000000..8dd430331d --- /dev/null +++ b/src/org/hibernate/intercept/cglib/FieldInterceptorImpl.java @@ -0,0 +1,147 @@ +//$Id$ +package org.hibernate.intercept.cglib; + +import java.io.Serializable; +import java.util.Set; + +import net.sf.cglib.transform.impl.InterceptFieldCallback; + +import org.hibernate.intercept.AbstractFieldInterceptor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; + +/** + * A field-level interceptor that initializes lazily fetched properties. + * This interceptor can be attached to classes instrumented by CGLIB. + * Note that this implementation assumes that the instance variable + * name is the same as the name of the persistent property that must + * be loaded. + * + * @author Gavin King + */ +public final class FieldInterceptorImpl extends AbstractFieldInterceptor implements InterceptFieldCallback, Serializable { + + /** + * Package-protected constructor + * + * @param session + * @param uninitializedFields + * @param entityName + */ + FieldInterceptorImpl(SessionImplementor session, Set uninitializedFields, String entityName) { + super( session, uninitializedFields, entityName ); + } + + public boolean readBoolean(Object target, String name, boolean oldValue) { + return ( ( Boolean ) intercept( target, name, oldValue ? 
Boolean.TRUE : Boolean.FALSE ) ) + .booleanValue(); + } + + public byte readByte(Object target, String name, byte oldValue) { + return ( ( Byte ) intercept( target, name, new Byte( oldValue ) ) ).byteValue(); + } + + public char readChar(Object target, String name, char oldValue) { + return ( ( Character ) intercept( target, name, new Character( oldValue ) ) ) + .charValue(); + } + + public double readDouble(Object target, String name, double oldValue) { + return ( ( Double ) intercept( target, name, new Double( oldValue ) ) ) + .doubleValue(); + } + + public float readFloat(Object target, String name, float oldValue) { + return ( ( Float ) intercept( target, name, new Float( oldValue ) ) ) + .floatValue(); + } + + public int readInt(Object target, String name, int oldValue) { + return ( ( Integer ) intercept( target, name, new Integer( oldValue ) ) ) + .intValue(); + } + + public long readLong(Object target, String name, long oldValue) { + return ( ( Long ) intercept( target, name, new Long( oldValue ) ) ).longValue(); + } + + public short readShort(Object target, String name, short oldValue) { + return ( ( Short ) intercept( target, name, new Short( oldValue ) ) ) + .shortValue(); + } + + public Object readObject(Object target, String name, Object oldValue) { + Object value = intercept( target, name, oldValue ); + if (value instanceof HibernateProxy) { + LazyInitializer li = ( (HibernateProxy) value ).getHibernateLazyInitializer(); + if ( li.isUnwrap() ) { + value = li.getImplementation(); + } + } + return value; + } + + public boolean writeBoolean(Object target, String name, boolean oldValue, boolean newValue) { + dirty(); + intercept( target, name, oldValue ? Boolean.TRUE : Boolean.FALSE ); + return newValue; + } + + public byte writeByte(Object target, String name, byte oldValue, byte newValue) { + dirty(); + intercept( target, name, new Byte( oldValue ) ); + return newValue; + } + + public char writeChar(Object target, String name, char oldValue, char newValue) { + dirty(); + intercept( target, name, new Character( oldValue ) ); + return newValue; + } + + public double writeDouble(Object target, String name, double oldValue, double newValue) { + dirty(); + intercept( target, name, new Double( oldValue ) ); + return newValue; + } + + public float writeFloat(Object target, String name, float oldValue, float newValue) { + dirty(); + intercept( target, name, new Float( oldValue ) ); + return newValue; + } + + public int writeInt(Object target, String name, int oldValue, int newValue) { + dirty(); + intercept( target, name, new Integer( oldValue ) ); + return newValue; + } + + public long writeLong(Object target, String name, long oldValue, long newValue) { + dirty(); + intercept( target, name, new Long( oldValue ) ); + return newValue; + } + + public short writeShort(Object target, String name, short oldValue, short newValue) { + dirty(); + intercept( target, name, new Short( oldValue ) ); + return newValue; + } + + public Object writeObject(Object target, String name, Object oldValue, Object newValue) { + dirty(); + intercept( target, name, oldValue ); + return newValue; + } + + public String toString() { + return "FieldInterceptorImpl(" + + "entityName=" + getEntityName() + + ",dirty=" + isDirty() + + ",uninitializedFields=" + getUninitializedFields() + + ')'; + } + +} \ No newline at end of file diff --git a/src/org/hibernate/intercept/javassist/FieldInterceptorImpl.java b/src/org/hibernate/intercept/javassist/FieldInterceptorImpl.java new file mode 100644 index 0000000000..b356bd6807 
--- /dev/null +++ b/src/org/hibernate/intercept/javassist/FieldInterceptorImpl.java @@ -0,0 +1,154 @@ +//$Id$ +package org.hibernate.intercept.javassist; + +import java.io.Serializable; +import java.util.Set; + +import org.hibernate.bytecode.javassist.FieldHandler; +import org.hibernate.intercept.AbstractFieldInterceptor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.proxy.HibernateProxy; +import org.hibernate.proxy.LazyInitializer; + +/** + * A field-level interceptor that initializes lazily fetched properties. + * This interceptor can be attached to classes instrumented by Javassist. + * Note that this implementation assumes that the instance variable + * name is the same as the name of the persistent property that must + * be loaded. + *

    + * Note: most of the interesting functionality here is farmed off + * to the super-class. The stuff here mainly acts as an adapter to the + * Javassist-specific functionality, routing interception through + * the super-class's intercept() method + * + * @author Steve Ebersole + */ +public final class FieldInterceptorImpl extends AbstractFieldInterceptor implements FieldHandler, Serializable { + + /** + * Package-protected constructor. + * + * @param session + * @param uninitializedFields + * @param entityName + */ + FieldInterceptorImpl(SessionImplementor session, Set uninitializedFields, String entityName) { + super( session, uninitializedFields, entityName ); + } + + + // FieldHandler impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public boolean readBoolean(Object target, String name, boolean oldValue) { + return ( ( Boolean ) intercept( target, name, oldValue ? Boolean.TRUE : Boolean.FALSE ) ) + .booleanValue(); + } + + public byte readByte(Object target, String name, byte oldValue) { + return ( ( Byte ) intercept( target, name, new Byte( oldValue ) ) ).byteValue(); + } + + public char readChar(Object target, String name, char oldValue) { + return ( ( Character ) intercept( target, name, new Character( oldValue ) ) ) + .charValue(); + } + + public double readDouble(Object target, String name, double oldValue) { + return ( ( Double ) intercept( target, name, new Double( oldValue ) ) ) + .doubleValue(); + } + + public float readFloat(Object target, String name, float oldValue) { + return ( ( Float ) intercept( target, name, new Float( oldValue ) ) ) + .floatValue(); + } + + public int readInt(Object target, String name, int oldValue) { + return ( ( Integer ) intercept( target, name, new Integer( oldValue ) ) ) + .intValue(); + } + + public long readLong(Object target, String name, long oldValue) { + return ( ( Long ) intercept( target, name, new Long( oldValue ) ) ).longValue(); + } + + public short readShort(Object target, String name, short oldValue) { + return ( ( Short ) intercept( target, name, new Short( oldValue ) ) ) + .shortValue(); + } + + public Object readObject(Object target, String name, Object oldValue) { + Object value = intercept( target, name, oldValue ); + if (value instanceof HibernateProxy) { + LazyInitializer li = ( (HibernateProxy) value ).getHibernateLazyInitializer(); + if ( li.isUnwrap() ) { + value = li.getImplementation(); + } + } + return value; + } + + public boolean writeBoolean(Object target, String name, boolean oldValue, boolean newValue) { + dirty(); + intercept( target, name, oldValue ? 
Boolean.TRUE : Boolean.FALSE ); + return newValue; + } + + public byte writeByte(Object target, String name, byte oldValue, byte newValue) { + dirty(); + intercept( target, name, new Byte( oldValue ) ); + return newValue; + } + + public char writeChar(Object target, String name, char oldValue, char newValue) { + dirty(); + intercept( target, name, new Character( oldValue ) ); + return newValue; + } + + public double writeDouble(Object target, String name, double oldValue, double newValue) { + dirty(); + intercept( target, name, new Double( oldValue ) ); + return newValue; + } + + public float writeFloat(Object target, String name, float oldValue, float newValue) { + dirty(); + intercept( target, name, new Float( oldValue ) ); + return newValue; + } + + public int writeInt(Object target, String name, int oldValue, int newValue) { + dirty(); + intercept( target, name, new Integer( oldValue ) ); + return newValue; + } + + public long writeLong(Object target, String name, long oldValue, long newValue) { + dirty(); + intercept( target, name, new Long( oldValue ) ); + return newValue; + } + + public short writeShort(Object target, String name, short oldValue, short newValue) { + dirty(); + intercept( target, name, new Short( oldValue ) ); + return newValue; + } + + public Object writeObject(Object target, String name, Object oldValue, Object newValue) { + dirty(); + intercept( target, name, oldValue ); + return newValue; + } + + public String toString() { + return "FieldInterceptorImpl(" + + "entityName=" + getEntityName() + + ",dirty=" + isDirty() + + ",uninitializedFields=" + getUninitializedFields() + + ')'; + } + +} \ No newline at end of file diff --git a/src/org/hibernate/intercept/javassist/JavassistHelper.java b/src/org/hibernate/intercept/javassist/JavassistHelper.java new file mode 100644 index 0000000000..023c35eb95 --- /dev/null +++ b/src/org/hibernate/intercept/javassist/JavassistHelper.java @@ -0,0 +1,29 @@ +package org.hibernate.intercept.javassist; + +import org.hibernate.intercept.FieldInterceptor; +import org.hibernate.engine.SessionImplementor; +import org.hibernate.bytecode.javassist.FieldHandled; + +import java.util.Set; + +/** + * @author Steve Ebersole + */ +public class JavassistHelper { + private JavassistHelper() { + } + + public static FieldInterceptor extractFieldInterceptor(Object entity) { + return ( FieldInterceptor ) ( ( FieldHandled ) entity ).getFieldHandler(); + } + + public static FieldInterceptor injectFieldInterceptor( + Object entity, + String entityName, + Set uninitializedFieldNames, + SessionImplementor session) { + FieldInterceptorImpl fieldInterceptor = new FieldInterceptorImpl( session, uninitializedFieldNames, entityName ); + ( ( FieldHandled ) entity ).setFieldHandler( fieldInterceptor ); + return fieldInterceptor; + } +} diff --git a/src/org/hibernate/intercept/package.html b/src/org/hibernate/intercept/package.html new file mode 100755 index 0000000000..4ea7033f84 --- /dev/null +++ b/src/org/hibernate/intercept/package.html @@ -0,0 +1,10 @@ + + + +

    + This package implements an interception + mechanism for lazy property fetching, + based on CGLIB or Javassist bytecode instrumentation. +
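For illustration, a hypothetical caller might probe an instrumented instance with the helpers in this package. The entity variable and the "text" property name are assumed; isInstrumented(), extractFieldInterceptor() and isInitialized() are the methods declared above in FieldInterceptionHelper and FieldInterceptor.

    if ( FieldInterceptionHelper.isInstrumented( entity ) ) {
        FieldInterceptor interceptor = FieldInterceptionHelper.extractFieldInterceptor( entity );
        if ( interceptor != null && !interceptor.isInitialized( "text" ) ) {
            // the first read of the "text" field will be routed through
            // LazyPropertyInitializer.initializeLazyProperty()
        }
    }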

    + + diff --git a/src/org/hibernate/jdbc/AbstractBatcher.java b/src/org/hibernate/jdbc/AbstractBatcher.java new file mode 100644 index 0000000000..a8b64a2fbe --- /dev/null +++ b/src/org/hibernate/jdbc/AbstractBatcher.java @@ -0,0 +1,620 @@ +//$Id$ +package org.hibernate.jdbc; + +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashSet; +import java.util.Iterator; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.AssertionFailure; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.ScrollMode; +import org.hibernate.TransactionException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.pretty.Formatter; +import org.hibernate.util.GetGeneratedKeysHelper; +import org.hibernate.util.JDBCExceptionReporter; +import org.hibernate.util.NamedGeneratedKeysHelper; + +/** + * Manages prepared statements and batching. + * + * @author Gavin King + */ +public abstract class AbstractBatcher implements Batcher { + + private static int globalOpenPreparedStatementCount; + private static int globalOpenResultSetCount; + + private int openPreparedStatementCount; + private int openResultSetCount; + + protected static final Log log = LogFactory.getLog(AbstractBatcher.class); + protected static final Log SQL_LOG = LogFactory.getLog("org.hibernate.SQL"); + + private final ConnectionManager connectionManager; + private final SessionFactoryImplementor factory; + + private PreparedStatement batchUpdate; + private String batchUpdateSQL; + + private HashSet statementsToClose = new HashSet(); + private HashSet resultSetsToClose = new HashSet(); + private PreparedStatement lastQuery; + + private boolean releasing = false; + private final Interceptor interceptor; + + private long transactionTimeout = -1; + boolean isTransactionTimeoutSet; + + public AbstractBatcher(ConnectionManager connectionManager, Interceptor interceptor) { + this.connectionManager = connectionManager; + this.interceptor = interceptor; + this.factory = connectionManager.getFactory(); + } + + public void setTransactionTimeout(int seconds) { + isTransactionTimeoutSet = true; + transactionTimeout = System.currentTimeMillis() / 1000 + seconds; + } + + public void unsetTransactionTimeout() { + isTransactionTimeoutSet = false; + } + + protected PreparedStatement getStatement() { + return batchUpdate; + } + + public CallableStatement prepareCallableStatement(String sql) + throws SQLException, HibernateException { + executeBatch(); + logOpenPreparedStatement(); + return getCallableStatement( connectionManager.getConnection(), sql, false); + } + + public PreparedStatement prepareStatement(String sql) + throws SQLException, HibernateException { + return prepareStatement( sql, false ); + } + + public PreparedStatement prepareStatement(String sql, boolean getGeneratedKeys) + throws SQLException, HibernateException { + executeBatch(); + logOpenPreparedStatement(); + return getPreparedStatement( + connectionManager.getConnection(), + sql, + false, + getGeneratedKeys, + null, + null, + false + ); + } + + public PreparedStatement prepareStatement(String sql, String[] columnNames) + throws SQLException, HibernateException { + executeBatch(); + logOpenPreparedStatement(); + return getPreparedStatement( + 
connectionManager.getConnection(), + sql, + false, + false, + columnNames, + null, + false + ); + } + + public PreparedStatement prepareSelectStatement(String sql) + throws SQLException, HibernateException { + logOpenPreparedStatement(); + return getPreparedStatement( + connectionManager.getConnection(), + sql, + false, + false, + null, + null, + false + ); + } + + public PreparedStatement prepareQueryStatement( + String sql, + boolean scrollable, + ScrollMode scrollMode) throws SQLException, HibernateException { + logOpenPreparedStatement(); + PreparedStatement ps = getPreparedStatement( + connectionManager.getConnection(), + sql, + scrollable, + scrollMode + ); + setStatementFetchSize( ps ); + statementsToClose.add( ps ); + lastQuery = ps; + return ps; + } + + public CallableStatement prepareCallableQueryStatement( + String sql, + boolean scrollable, + ScrollMode scrollMode) throws SQLException, HibernateException { + logOpenPreparedStatement(); + CallableStatement ps = ( CallableStatement ) getPreparedStatement( + connectionManager.getConnection(), + sql, + scrollable, + false, + null, + scrollMode, + true + ); + setStatementFetchSize( ps ); + statementsToClose.add( ps ); + lastQuery = ps; + return ps; + } + + public void abortBatch(SQLException sqle) { + try { + if (batchUpdate!=null) closeStatement(batchUpdate); + } + catch (SQLException e) { + //noncritical, swallow and let the other propagate! + JDBCExceptionReporter.logExceptions(e); + } + finally { + batchUpdate=null; + batchUpdateSQL=null; + } + } + + public ResultSet getResultSet(PreparedStatement ps) throws SQLException { + ResultSet rs = ps.executeQuery(); + resultSetsToClose.add(rs); + logOpenResults(); + return rs; + } + + public ResultSet getResultSet(CallableStatement ps, Dialect dialect) throws SQLException { + ResultSet rs = dialect.getResultSet(ps); + resultSetsToClose.add(rs); + logOpenResults(); + return rs; + + } + + public void closeQueryStatement(PreparedStatement ps, ResultSet rs) throws SQLException { + boolean psStillThere = statementsToClose.remove( ps ); + try { + if ( rs != null ) { + if ( resultSetsToClose.remove( rs ) ) { + logCloseResults(); + rs.close(); + } + } + } + finally { + if ( psStillThere ) { + closeQueryStatement( ps ); + } + } + } + + public PreparedStatement prepareBatchStatement(String sql) + throws SQLException, HibernateException { + sql = getSQL( sql ); + + if ( !sql.equals(batchUpdateSQL) ) { + batchUpdate=prepareStatement(sql); // calls executeBatch() + batchUpdateSQL=sql; + } + else { + log.debug("reusing prepared statement"); + log(sql); + } + return batchUpdate; + } + + public CallableStatement prepareBatchCallableStatement(String sql) + throws SQLException, HibernateException { + if ( !sql.equals(batchUpdateSQL) ) { // TODO: what if batchUpdate is a callablestatement ? 
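+ // a changed SQL string means the pending batch, if any, is executed before the new statement is prepared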
+ batchUpdate=prepareCallableStatement(sql); // calls executeBatch() + batchUpdateSQL=sql; + } + return (CallableStatement)batchUpdate; + } + + + public void executeBatch() throws HibernateException { + if (batchUpdate!=null) { + try { + try { + doExecuteBatch(batchUpdate); + } + finally { + closeStatement(batchUpdate); + } + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + factory.getSQLExceptionConverter(), + sqle, + "Could not execute JDBC batch update", + batchUpdateSQL + ); + } + finally { + batchUpdate=null; + batchUpdateSQL=null; + } + } + } + + public void closeStatement(PreparedStatement ps) throws SQLException { + logClosePreparedStatement(); + closePreparedStatement(ps); + } + + private void closeQueryStatement(PreparedStatement ps) throws SQLException { + + try { + //work around a bug in all known connection pools.... + if ( ps.getMaxRows()!=0 ) ps.setMaxRows(0); + if ( ps.getQueryTimeout()!=0 ) ps.setQueryTimeout(0); + } + catch (Exception e) { + log.warn("exception clearing maxRows/queryTimeout", e); +// ps.close(); //just close it; do NOT try to return it to the pool! + return; //NOTE: early exit! + } + finally { + closeStatement(ps); + } + + if ( lastQuery==ps ) lastQuery = null; + + } + + /** + * Actually releases the batcher, allowing it to cleanup internally held + * resources. + */ + public void closeStatements() { + try { + releasing = true; + + try { + if (batchUpdate!=null) batchUpdate.close(); + } + catch (SQLException sqle) { + //no big deal + log.warn("Could not close a JDBC prepared statement", sqle); + } + batchUpdate=null; + batchUpdateSQL=null; + + Iterator iter = resultSetsToClose.iterator(); + while ( iter.hasNext() ) { + try { + logCloseResults(); + ( (ResultSet) iter.next() ).close(); + } + catch (SQLException e) { + // no big deal + log.warn("Could not close a JDBC result set", e); + } + catch (Throwable e) { + // sybase driver (jConnect) throwing NPE here in certain cases + log.warn("Could not close a JDBC result set", e); + } + } + resultSetsToClose.clear(); + + iter = statementsToClose.iterator(); + while ( iter.hasNext() ) { + try { + closeQueryStatement( (PreparedStatement) iter.next() ); + } + catch (SQLException e) { + // no big deal + log.warn("Could not close a JDBC statement", e); + } + } + statementsToClose.clear(); + } + finally { + releasing = false; + } + } + + protected abstract void doExecuteBatch(PreparedStatement ps) throws SQLException, HibernateException; + + private String preparedStatementCountsToString() { + return + " (open PreparedStatements: " + + openPreparedStatementCount + + ", globally: " + + globalOpenPreparedStatementCount + + ")"; + } + + private String resultSetCountsToString() { + return + " (open ResultSets: " + + openResultSetCount + + ", globally: " + + globalOpenResultSetCount + + ")"; + } + + private void logOpenPreparedStatement() { + if ( log.isDebugEnabled() ) { + log.debug( "about to open PreparedStatement" + preparedStatementCountsToString() ); + openPreparedStatementCount++; + globalOpenPreparedStatementCount++; + } + } + + private void logClosePreparedStatement() { + if ( log.isDebugEnabled() ) { + log.debug( "about to close PreparedStatement" + preparedStatementCountsToString() ); + openPreparedStatementCount--; + globalOpenPreparedStatementCount--; + } + } + + private void logOpenResults() { + if ( log.isDebugEnabled() ) { + log.debug( "about to open ResultSet" + resultSetCountsToString() ); + openResultSetCount++; + globalOpenResultSetCount++; + } + } + private void logCloseResults() 
{ + if ( log.isDebugEnabled() ) { + log.debug( "about to close ResultSet" + resultSetCountsToString() ); + openResultSetCount--; + globalOpenResultSetCount--; + } + } + + protected SessionFactoryImplementor getFactory() { + return factory; + } + + private void log(String sql) { + if ( SQL_LOG.isDebugEnabled() ) { + SQL_LOG.debug( format(sql) ); + } + if ( factory.getSettings().isShowSqlEnabled() ) { + System.out.println( "Hibernate: " + format(sql) ); + } + } + + private String format(String sql) { + if ( factory.getSettings().isFormatSqlEnabled() ) { + return new Formatter(sql).format(); + } + else { + return sql; + } + } + + private PreparedStatement getPreparedStatement( + final Connection conn, + final String sql, + final boolean scrollable, + final ScrollMode scrollMode) + throws SQLException { + return getPreparedStatement( + conn, + sql, + scrollable, + false, + null, + scrollMode, + false + ); + } + + private CallableStatement getCallableStatement( + final Connection conn, + String sql, + boolean scrollable) throws SQLException { + if ( scrollable && !factory.getSettings().isScrollableResultSetsEnabled() ) { + throw new AssertionFailure("scrollable result sets are not enabled"); + } + + sql = getSQL( sql ); + log( sql ); + + log.trace("preparing callable statement"); + if ( scrollable ) { + return conn.prepareCall( + sql, + ResultSet.TYPE_SCROLL_INSENSITIVE, + ResultSet.CONCUR_READ_ONLY + ); + } + else { + return conn.prepareCall( sql ); + } + } + + private String getSQL(String sql) { + sql = interceptor.onPrepareStatement( sql ); + if ( sql==null || sql.length() == 0 ) { + throw new AssertionFailure( "Interceptor.onPrepareStatement() returned null or empty string." ); + } + return sql; + } + + private PreparedStatement getPreparedStatement( + final Connection conn, + String sql, + boolean scrollable, + final boolean useGetGeneratedKeys, + final String[] namedGeneratedKeys, + final ScrollMode scrollMode, + final boolean callable) throws SQLException { + if ( scrollable && !factory.getSettings().isScrollableResultSetsEnabled() ) { + throw new AssertionFailure("scrollable result sets are not enabled"); + } + if ( useGetGeneratedKeys && !factory.getSettings().isGetGeneratedKeysEnabled() ) { + throw new AssertionFailure("getGeneratedKeys() support is not enabled"); + } + + sql = getSQL( sql ); + log( sql ); + + log.trace( "preparing statement" ); + PreparedStatement result; + if ( scrollable ) { + if ( callable ) { + result = conn.prepareCall( sql, scrollMode.toResultSetType(), ResultSet.CONCUR_READ_ONLY ); + } + else { + result = conn.prepareStatement( sql, scrollMode.toResultSetType(), ResultSet.CONCUR_READ_ONLY ); + } + } + else if ( useGetGeneratedKeys ) { + result = GetGeneratedKeysHelper.prepareStatement( conn, sql ); + } + else if ( namedGeneratedKeys != null ) { + result = NamedGeneratedKeysHelper.prepareStatement( conn, sql, namedGeneratedKeys ); + } + else { + if ( callable ) { + result = conn.prepareCall( sql ); + } + else { + result = conn.prepareStatement( sql ); + } + } + + setTimeout( result ); + + if ( factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().prepareStatement(); + } + + return result; + + } + + private void setTimeout(PreparedStatement result) throws SQLException { + if ( isTransactionTimeoutSet ) { + int timeout = (int) ( transactionTimeout - ( System.currentTimeMillis() / 1000 ) ); + if (timeout<=0) { + throw new TransactionException("transaction timeout expired"); + } + else { + result.setQueryTimeout(timeout); + } + } + 
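+ // Worked example (illustrative, not in the original source): setTransactionTimeout(30)
+ // called at epoch second T stores transactionTimeout = T + 30. A statement prepared
+ // 12 seconds later gets setQueryTimeout(18); one prepared after the deadline hits the
+ // timeout<=0 branch above and fails with TransactionException.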
} + + private void closePreparedStatement(PreparedStatement ps) throws SQLException { + try { + log.trace("closing statement"); + ps.close(); + if ( factory.getStatistics().isStatisticsEnabled() ) { + factory.getStatisticsImplementor().closeStatement(); + } + } + finally { + if ( !releasing ) { + // If we are in the process of releasing, no sense + // checking for aggressive-release possibility. + connectionManager.afterStatement(); + } + } + } + + private void setStatementFetchSize(PreparedStatement statement) throws SQLException { + Integer statementFetchSize = factory.getSettings().getJdbcFetchSize(); + if ( statementFetchSize!=null ) { + statement.setFetchSize( statementFetchSize.intValue() ); + } + } + + public Connection openConnection() throws HibernateException { + log.debug("opening JDBC connection"); + try { + return factory.getConnectionProvider().getConnection(); + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + factory.getSQLExceptionConverter(), + sqle, + "Cannot open connection" + ); + } + } + + public void closeConnection(Connection conn) throws HibernateException { + if ( log.isDebugEnabled() ) { + log.debug( + "closing JDBC connection" + + preparedStatementCountsToString() + + resultSetCountsToString() + ); + } + + try { + if ( !conn.isClosed() ) { + JDBCExceptionReporter.logAndClearWarnings(conn); + } + factory.getConnectionProvider().closeConnection(conn); + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + factory.getSQLExceptionConverter(), + sqle, + "Cannot close connection" + ); + } + } + + public void cancelLastQuery() throws HibernateException { + try { + if (lastQuery!=null) lastQuery.cancel(); + } + catch (SQLException sqle) { + throw JDBCExceptionHelper.convert( + factory.getSQLExceptionConverter(), + sqle, + "Cannot cancel query" + ); + } + } + + public boolean hasOpenResources() { + return resultSetsToClose.size() > 0 || statementsToClose.size() > 0; + } + + public String openResourceStatsAsString() { + return preparedStatementCountsToString() + resultSetCountsToString(); + } + +} + + + + + + diff --git a/src/org/hibernate/jdbc/BatchFailedException.java b/src/org/hibernate/jdbc/BatchFailedException.java new file mode 100644 index 0000000000..7845dcff2e --- /dev/null +++ b/src/org/hibernate/jdbc/BatchFailedException.java @@ -0,0 +1,18 @@ +package org.hibernate.jdbc; + +import org.hibernate.HibernateException; + +/** + * Indicates a failed batch entry (a -3, i.e. Statement.EXECUTE_FAILED, return code). + * + * @author Steve Ebersole + */ +public class BatchFailedException extends HibernateException { + public BatchFailedException(String s) { + super( s ); + } + + public BatchFailedException(String string, Throwable root) { + super( string, root ); + } +} diff --git a/src/org/hibernate/jdbc/BatchedTooManyRowsAffectedException.java b/src/org/hibernate/jdbc/BatchedTooManyRowsAffectedException.java new file mode 100644 index 0000000000..c5fc6132a1 --- /dev/null +++ b/src/org/hibernate/jdbc/BatchedTooManyRowsAffectedException.java @@ -0,0 +1,21 @@ +package org.hibernate.jdbc; + +/** + * Much like {@link TooManyRowsAffectedException}, indicates that more + * rows than we were expecting were affected. Additionally, this form + * occurs from a batch and carries along the batch position that failed.
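+ * <p/>
+ * An illustrative example (not part of the original javadoc): if entry 7 of a JDBC
+ * batch reports 2 affected rows where the verifying Expectation demanded exactly 1,
+ * this exception can be raised with getBatchPosition() == 7.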
+ * + * @author Steve Ebersole + */ +public class BatchedTooManyRowsAffectedException extends TooManyRowsAffectedException { + private final int batchPosition; + + public BatchedTooManyRowsAffectedException(String message, int expectedRowCount, int actualRowCount, int batchPosition) { + super( message, expectedRowCount, actualRowCount ); + this.batchPosition = batchPosition; + } + + public int getBatchPosition() { + return batchPosition; + } +} diff --git a/src/org/hibernate/jdbc/Batcher.java b/src/org/hibernate/jdbc/Batcher.java new file mode 100644 index 0000000000..d3ee2f7282 --- /dev/null +++ b/src/org/hibernate/jdbc/Batcher.java @@ -0,0 +1,163 @@ +//$Id$ +package org.hibernate.jdbc; + +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.hibernate.HibernateException; +import org.hibernate.ScrollMode; +import org.hibernate.dialect.Dialect; + +/** + * Manages PreparedStatements for a session. Abstracts JDBC + * batching to maintain the illusion that a single logical batch + * exists for the whole session, even when batching is disabled. + * Provides transparent PreparedStatement caching. + * + * @see java.sql.PreparedStatement + * @see org.hibernate.impl.SessionImpl + * @author Gavin King + */ +public interface Batcher { + /** + * Get a prepared statement for use in loading / querying. If not explicitly + * released by closeQueryStatement(), it will be released when the + * session is closed or disconnected. + */ + public PreparedStatement prepareQueryStatement(String sql, boolean scrollable, ScrollMode scrollMode) throws SQLException, HibernateException; + /** + * Close a prepared statement opened with prepareQueryStatement() + */ + public void closeQueryStatement(PreparedStatement ps, ResultSet rs) throws SQLException; + /** + * Get a prepared statement for use in loading / querying. If not explicitly + * released by closeQueryStatement(), it will be released when the + * session is closed or disconnected. + */ + public CallableStatement prepareCallableQueryStatement(String sql, boolean scrollable, ScrollMode scrollMode) throws SQLException, HibernateException; + + + /** + * Get a non-batchable prepared statement to use for selecting. Does not + * result in execution of the current batch. + */ + public PreparedStatement prepareSelectStatement(String sql) throws SQLException, HibernateException; + + /** + * Get a non-batchable prepared statement to use for inserting / deleting / updating, + * using JDBC3 getGeneratedKeys ({@link Connection#prepareStatement(String, int)}). + *
+ * Must be explicitly released by {@link #closeStatement} after use. + */ + public PreparedStatement prepareStatement(String sql, boolean useGetGeneratedKeys) throws SQLException, HibernateException; + + /** + * Get a non-batchable prepared statement to use for inserting / deleting / updating, + * using JDBC3 getGeneratedKeys ({@link Connection#prepareStatement(String, String[])}). + *
    + * Must be explicitly released by {@link #closeStatement} after use. + */ + public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException, HibernateException; + + /** + * Get a non-batchable prepared statement to use for inserting / deleting / updating. + *
    + * Must be explicitly released by {@link #closeStatement} after use. + */ + public PreparedStatement prepareStatement(String sql) throws SQLException, HibernateException; + + /** + * Get a non-batchable callable statement to use for inserting / deleting / updating. + *
    + * Must be explicitly released by {@link #closeStatement} after use. + */ + public CallableStatement prepareCallableStatement(String sql) throws SQLException, HibernateException; + + /** + * Close a prepared or callable statement opened using prepareStatement() or prepareCallableStatement() + */ + public void closeStatement(PreparedStatement ps) throws SQLException; + + /** + * Get a batchable prepared statement to use for inserting / deleting / updating + * (might be called many times before a single call to executeBatch()). + * After setting parameters, call addToBatch - do not execute the + * statement explicitly. + * @see Batcher#addToBatch + */ + public PreparedStatement prepareBatchStatement(String sql) throws SQLException, HibernateException; + + /** + * Get a batchable callable statement to use for inserting / deleting / updating + * (might be called many times before a single call to executeBatch()). + * After setting parameters, call addToBatch - do not execute the + * statement explicitly. + * @see Batcher#addToBatch + */ + public CallableStatement prepareBatchCallableStatement(String sql) throws SQLException, HibernateException; + + /** + * Add an insert / delete / update to the current batch (might be called multiple times + * for single prepareBatchStatement()) + */ + public void addToBatch(Expectation expectation) throws SQLException, HibernateException; + + /** + * Execute the batch + */ + public void executeBatch() throws HibernateException; + + /** + * Close any query statements that were left lying around + */ + public void closeStatements(); + /** + * Execute the statement and return the result set + */ + public ResultSet getResultSet(PreparedStatement ps) throws SQLException; + /** + * Execute the statement and return the result set from a callable statement + */ + public ResultSet getResultSet(CallableStatement ps, Dialect dialect) throws SQLException; + + /** + * Must be called when an exception occurs + * @param sqle the (not null) exception that is the reason for aborting + */ + public void abortBatch(SQLException sqle); + + /** + * Cancel the current query statement + */ + public void cancelLastQuery() throws HibernateException; + + public boolean hasOpenResources(); + + public String openResourceStatsAsString(); + + // TODO : remove these last two as batcher is no longer managing connections + + /** + * Obtain a JDBC connection + */ + public Connection openConnection() throws HibernateException; + /** + * Dispose of the JDBC connection + */ + public void closeConnection(Connection conn) throws HibernateException; + + /** + * Set the transaction timeout to seconds later + * than the current system time. + */ + public void setTransactionTimeout(int seconds); + /** + * Unset the transaction timeout, called after the end of a + * transaction. + */ + public void unsetTransactionTimeout(); +} + diff --git a/src/org/hibernate/jdbc/BatcherFactory.java b/src/org/hibernate/jdbc/BatcherFactory.java new file mode 100755 index 0000000000..0c262d362b --- /dev/null +++ b/src/org/hibernate/jdbc/BatcherFactory.java @@ -0,0 +1,13 @@ +//$Id$ +package org.hibernate.jdbc; + +import org.hibernate.Interceptor; + + +/** + * Factory for Batcher instances. 
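+ * <p/>
+ * A minimal sketch of an implementation (illustrative only; the class name is hypothetical):
+ * <pre>
+ * public class MyBatcherFactory implements BatcherFactory {
+ *     public Batcher createBatcher(ConnectionManager connectionManager, Interceptor interceptor) {
+ *         return new BatchingBatcher( connectionManager, interceptor );
+ *     }
+ * }
+ * </pre>
+ *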
+ * @author Gavin King + */ +public interface BatcherFactory { + public Batcher createBatcher(ConnectionManager connectionManager, Interceptor interceptor); +} diff --git a/src/org/hibernate/jdbc/BatchingBatcher.java b/src/org/hibernate/jdbc/BatchingBatcher.java new file mode 100644 index 0000000000..16350ddce0 --- /dev/null +++ b/src/org/hibernate/jdbc/BatchingBatcher.java @@ -0,0 +1,78 @@ +//$Id$ +package org.hibernate.jdbc; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.StaleStateException; + +/** + * An implementation of the Batcher interface that + * actually uses batching + * @author Gavin King + */ +public class BatchingBatcher extends AbstractBatcher { + + private int batchSize; + private Expectation[] expectations; + + public BatchingBatcher(ConnectionManager connectionManager, Interceptor interceptor) { + super( connectionManager, interceptor ); + expectations = new Expectation[ getFactory().getSettings().getJdbcBatchSize() ]; + } + + public void addToBatch(Expectation expectation) throws SQLException, HibernateException { + if ( !expectation.canBeBatched() ) { + throw new HibernateException( "attempting to batch an operation which cannot be batched" ); + } + PreparedStatement batchUpdate = getStatement(); + batchUpdate.addBatch(); + expectations[ batchSize++ ] = expectation; + if ( batchSize == getFactory().getSettings().getJdbcBatchSize() ) { + doExecuteBatch( batchUpdate ); + } + } + + protected void doExecuteBatch(PreparedStatement ps) throws SQLException, HibernateException { + if ( batchSize == 0 ) { + log.debug( "no batched statements to execute" ); + } + else { + if ( log.isDebugEnabled() ) { + log.debug( "Executing batch size: " + batchSize ); + } + + try { + checkRowCounts( ps.executeBatch(), ps ); + } + catch (RuntimeException re) { + log.error( "Exception executing batch: ", re ); + throw re; + } + finally { + batchSize = 0; + } + + } + + } + + private void checkRowCounts(int[] rowCounts, PreparedStatement ps) throws SQLException, HibernateException { + int numberOfRowCounts = rowCounts.length; + if ( numberOfRowCounts != batchSize ) { + log.warn( "JDBC driver did not return the expected number of row counts" ); + } + for ( int i = 0; i < numberOfRowCounts; i++ ) { + expectations[i].verifyOutcome( rowCounts[i], ps, i ); + } + } + +} + + + + + + diff --git a/src/org/hibernate/jdbc/BatchingBatcherFactory.java b/src/org/hibernate/jdbc/BatchingBatcherFactory.java new file mode 100755 index 0000000000..7c0852f202 --- /dev/null +++ b/src/org/hibernate/jdbc/BatchingBatcherFactory.java @@ -0,0 +1,19 @@ +//$Id$ +package org.hibernate.jdbc; + +import org.hibernate.Interceptor; + + +/** + * A BatcherFactory implementation which constructs Batcher instances + * capable of actually performing batch operations. 
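+ * <p/>
+ * For orientation, a rough sketch of how a caller drives the produced Batcher
+ * (illustrative only; sql, id and expectation are stand-ins):
+ * <pre>
+ * PreparedStatement ps = batcher.prepareBatchStatement( sql );
+ * ps.setLong( 1, id );                // bind parameters for one row
+ * batcher.addToBatch( expectation );  // repeat per row; may trigger doExecuteBatch()
+ * batcher.executeBatch();             // flush whatever remains at the end
+ * </pre>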
+ * + * @author Gavin King + */ +public class BatchingBatcherFactory implements BatcherFactory { + + public Batcher createBatcher(ConnectionManager connectionManager, Interceptor interceptor) { + return new BatchingBatcher( connectionManager, interceptor ); + } + +} diff --git a/src/org/hibernate/jdbc/BorrowedConnectionProxy.java b/src/org/hibernate/jdbc/BorrowedConnectionProxy.java new file mode 100644 index 0000000000..3d1103efb8 --- /dev/null +++ b/src/org/hibernate/jdbc/BorrowedConnectionProxy.java @@ -0,0 +1,117 @@ +package org.hibernate.jdbc; + +import org.hibernate.HibernateException; + +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.lang.reflect.InvocationTargetException; +import java.sql.Connection; + +/** + * A proxy for borrowed connections which funnels all requests back + * into the ConnectionManager from which it was borrowed to be properly + * handled (in terms of connection release modes). + *
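+ * An illustrative borrowing sequence (a sketch, not part of the original javadoc):
+ * <pre>
+ * Connection conn = connectionManager.borrowConnection(); // hands out this proxy
+ * // ... application-level JDBC work ...
+ * conn.close(); // intercepted by invoke(): routed to releaseBorrowedConnection()
+ * </pre>
+ * <p/>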
+ * Note: the term borrowed here refers to connection references obtained + * via {@link org.hibernate.Session#connection()} for application usage. + * + * @author Steve Ebersole + */ +public class BorrowedConnectionProxy implements InvocationHandler { + + private static final Class[] PROXY_INTERFACES = new Class[] { Connection.class, ConnectionWrapper.class }; + + private final ConnectionManager connectionManager; + private boolean useable = true; + + public BorrowedConnectionProxy(ConnectionManager connectionManager) { + this.connectionManager = connectionManager; + } + + /** + * {@inheritDoc} + */ + public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { + if ( "close".equals( method.getName() ) ) { + connectionManager.releaseBorrowedConnection(); + return null; + } + // should probably no-op commit/rollback here, at least in JTA scenarios + if ( !useable ) { + throw new HibernateException( "connection proxy not usable after transaction completion" ); + } + + if ( "getWrappedConnection".equals( method.getName() ) ) { + return connectionManager.getConnection(); + } + + try { + return method.invoke( connectionManager.getConnection(), args ); + } + catch( InvocationTargetException e ) { + throw e.getTargetException(); + } + } + + /** + * Generates a Connection proxy wrapping the connection managed by the passed + * connection manager. + * + * @param connectionManager The connection manager to wrap with the + * connection proxy. + * @return The generated proxy. + */ + public static Connection generateProxy(ConnectionManager connectionManager) { + BorrowedConnectionProxy handler = new BorrowedConnectionProxy( connectionManager ); + return ( Connection ) Proxy.newProxyInstance( + getProxyClassLoader(), + PROXY_INTERFACES, + handler + ); + } + + /** + * Marks a borrowed connection as no longer usable. + * + * @param connection The connection (proxy) to be marked. + */ + public static void renderUnuseable(Connection connection) { + if ( connection != null && Proxy.isProxyClass( connection.getClass() ) ) { + InvocationHandler handler = Proxy.getInvocationHandler( connection ); + if ( BorrowedConnectionProxy.class.isAssignableFrom( handler.getClass() ) ) { + ( ( BorrowedConnectionProxy ) handler ).useable = false; + } + } + } + + /** + * Convenience method for unwrapping a connection proxy and getting a + * handle to an underlying connection. + * + * @param connection The connection (proxy) to be unwrapped. + * @return The unwrapped connection. + */ + public static Connection getWrappedConnection(Connection connection) { + if ( connection != null && connection instanceof ConnectionWrapper ) { + return ( ( ConnectionWrapper ) connection ).getWrappedConnection(); + } + else { + return connection; + } + } + + /** + * Determines the appropriate class loader to which the generated proxy + * should be scoped. + * + * @return The class loader appropriate for proxy construction.
+ */ + public static ClassLoader getProxyClassLoader() { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + if ( cl == null ) { + cl = BorrowedConnectionProxy.class.getClassLoader(); + } + return cl; + } +} diff --git a/src/org/hibernate/jdbc/ColumnNameCache.java b/src/org/hibernate/jdbc/ColumnNameCache.java new file mode 100644 index 0000000000..94e1575b04 --- /dev/null +++ b/src/org/hibernate/jdbc/ColumnNameCache.java @@ -0,0 +1,33 @@ +// $Id$ +package org.hibernate.jdbc; + +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Map; + +/** + * Implementation of ColumnNameCache. + * + * @author Steve Ebersole + */ +public class ColumnNameCache { + + private final Map columnNameToIndexCache; + + public ColumnNameCache(int columnCount) { + // should *not* need to grow beyond the size of the total number of columns in the rs + this.columnNameToIndexCache = new HashMap( columnCount ); + } + + public int getIndexForColumnName(String columnName, ResultSetWrapper rs)throws SQLException { + Integer cached = ( Integer ) columnNameToIndexCache.get( columnName ); + if ( cached != null ) { + return cached.intValue(); + } + else { + int index = rs.getTarget().findColumn( columnName ); + columnNameToIndexCache.put( columnName, new Integer(index) ); + return index; + } + } +} diff --git a/src/org/hibernate/jdbc/ConnectionManager.java b/src/org/hibernate/jdbc/ConnectionManager.java new file mode 100644 index 0000000000..b979a1c0c5 --- /dev/null +++ b/src/org/hibernate/jdbc/ConnectionManager.java @@ -0,0 +1,539 @@ +// $Id$ +package org.hibernate.jdbc; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.sql.Connection; +import java.sql.SQLException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.ConnectionReleaseMode; +import org.hibernate.HibernateException; +import org.hibernate.Interceptor; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.exception.JDBCExceptionHelper; +import org.hibernate.util.JDBCExceptionReporter; + +/** + * Encapsulates JDBC Connection management logic needed by Hibernate. + *
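+ * A rough sketch of the intended usage (illustrative, not part of the original javadoc):
+ * <pre>
+ * Connection conn = connectionManager.getConnection(); // obtained lazily on first need
+ * // ... execute statements through the Batcher ...
+ * connectionManager.afterStatement(); // may aggressively release, per ConnectionReleaseMode
+ * </pre>
+ * <p/>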
+ * The lifecycle is intended to span a logical series of interactions with the + * database. Internally, this means the lifecycle of the Session. + * + * @author Steve Ebersole + */ +public class ConnectionManager implements Serializable { + + private static final Log log = LogFactory.getLog( ConnectionManager.class ); + + public static interface Callback { + public void connectionOpened(); + public void connectionCleanedUp(); + public boolean isTransactionInProgress(); + } + + private transient SessionFactoryImplementor factory; + private final Callback callback; + + private final ConnectionReleaseMode releaseMode; + private transient Connection connection; + private transient Connection borrowedConnection; + + private final boolean wasConnectionSupplied; + private transient Batcher batcher; + private transient Interceptor interceptor; + private boolean isClosed; + private transient boolean isFlushing; + + /** + * Constructs a ConnectionManager. + *
    + * This is the form used internally. + * + * @param factory The SessionFactory. + * @param callback An observer for internal state change. + * @param releaseMode The mode by which to release JDBC connections. + * @param connection An externally supplied connection. + */ + public ConnectionManager( + SessionFactoryImplementor factory, + Callback callback, + ConnectionReleaseMode releaseMode, + Connection connection, + Interceptor interceptor) { + this.factory = factory; + this.callback = callback; + + this.interceptor = interceptor; + this.batcher = factory.getSettings().getBatcherFactory().createBatcher( this, interceptor ); + + this.connection = connection; + wasConnectionSupplied = ( connection != null ); + + this.releaseMode = wasConnectionSupplied ? ConnectionReleaseMode.ON_CLOSE : releaseMode; + } + + /** + * Private constructor used exclusively from custom serialization + */ + private ConnectionManager( + SessionFactoryImplementor factory, + Callback callback, + ConnectionReleaseMode releaseMode, + Interceptor interceptor, + boolean wasConnectionSupplied, + boolean isClosed) { + this.factory = factory; + this.callback = callback; + + this.interceptor = interceptor; + this.batcher = factory.getSettings().getBatcherFactory().createBatcher( this, interceptor ); + + this.wasConnectionSupplied = wasConnectionSupplied; + this.isClosed = isClosed; + this.releaseMode = wasConnectionSupplied ? ConnectionReleaseMode.ON_CLOSE : releaseMode; + } + + /** + * The session factory. + * + * @return the session factory. + */ + public SessionFactoryImplementor getFactory() { + return factory; + } + + /** + * The batcher managed by this ConnectionManager. + * + * @return The batcher. + */ + public Batcher getBatcher() { + return batcher; + } + + /** + * Was the connection being used here supplied by the user? + * + * @return True if the user supplied the JDBC connection; false otherwise + */ + public boolean isSuppliedConnection() { + return wasConnectionSupplied; + } + + /** + * Retrieves the connection currently managed by this ConnectionManager. + *
+ * Note that we may need to obtain a connection to return here if a + * connection has either not yet been obtained (non-UserSuppliedConnectionProvider) + * or has previously been aggressively released (if supported in this environment). + * + * @return The current Connection. + * + * @throws HibernateException Indicates a connection is currently not + * available (we are currently manually disconnected). + */ + public Connection getConnection() throws HibernateException { + if ( isClosed ) { + throw new HibernateException( "connection manager has been closed" ); + } + if ( connection == null ) { + openConnection(); + } + return connection; + } + + public boolean hasBorrowedConnection() { + // used from testsuite + return borrowedConnection != null; + } + + public Connection borrowConnection() { + if ( isClosed ) { + throw new HibernateException( "connection manager has been closed" ); + } + if ( isSuppliedConnection() ) { + return connection; + } + else { + if ( borrowedConnection == null ) { + borrowedConnection = BorrowedConnectionProxy.generateProxy( this ); + } + return borrowedConnection; + } + } + + public void releaseBorrowedConnection() { + if ( borrowedConnection != null ) { + try { + BorrowedConnectionProxy.renderUnuseable( borrowedConnection ); + } + finally { + borrowedConnection = null; + } + } + } + + /** + * Is the connection considered "auto-commit"? + * + * @return True if we either do not have a connection, or the connection + * really is in auto-commit mode. + * + * @throws SQLException Can be thrown by the Connection.getAutoCommit() check. + */ + public boolean isAutoCommit() throws SQLException { + return connection == null + || connection.isClosed() + || connection.getAutoCommit(); + } + + /** + * Will connections be released after each statement execution? + *
    + * Connections will be released after each statement if either: