diff --git a/.gitignore b/.gitignore
index adced34f0d..164c5fcfab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -32,3 +32,10 @@ bin
# Miscellaneous
*.log
.clover
+
+# JBoss Transactions
+ObjectStore
+
+# Profiler and heap dumps
+*.jps
+*.hprof
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000..87d82fdf71
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,45 @@
+Guidelines for Contributing
+====
+Contributions from the community are essential in keeping Hibernate (any Open Source
+project really) strong and successful. While we try to keep requirements for
+contributing to a minimum, there are a few guidelines we ask that you mind.
+
+## Getting Started
+If you are just getting started with Git, GitHub and/or contributing to Hibernate via
+GitHub there are a few pre-requisite steps.
+
+* Make sure you have a [Hibernate Jira account](https://hibernate.onjira.com)
+* Make sure you have a [GitHub account](https://github.com/signup/free)
+* [Fork](http://help.github.com/fork-a-repo) the Hibernate repository. As discussed in
+the linked page, this also includes:
+ * [Set](https://help.github.com/articles/set-up-git) up your local git install
+ * Clone your fork
+
+
+## Create the working (topic) branch
+Create a "topic" branch on which you will work. The convention is to name the branch
+using the JIRA issue key. If there is not already a Jira issue covering the work you
+want to do, create one. Assuming you will be working from the master branch and working
+on the Jira HHH-123 : `git checkout -b HHH-123 master`
+
+
+## Code
+Do yo thang!
+
+## Commit
+
+* Make commits of logical units.
+* Be sure to use the JIRA issue key in the commit message. This is how Jira will pick
+up the related commits and display them on the Jira issue.
+* Make sure you have added the necessary tests for your changes.
+* Run _all_ the tests to assure nothing else was accidentally broken.
+
+_Prior to committing, if you want to pull in the latest upstream changes (highly
+appreciated btw), please use rebasing rather than merging. Merging creates
+"merge commits" that really muck up the project timeline._
+
+## Submit
+* Sign the [Contributor License Agreement](https://cla.jboss.org/index.seam).
+* Push your changes to a topic branch in your fork of the repository.
+* Initiate a [pull request](http://help.github.com/send-pull-requests/)
+* Update the Jira issue, adding a comment including a link to the created pull request
diff --git a/README.md b/README.md
index eb80686278..e7d3e9d98a 100644
--- a/README.md
+++ b/README.md
@@ -38,8 +38,12 @@ Executing Tasks
Gradle uses the concept of build tasks (equivalent to Ant targets). You can get a list of available tasks
via
- gradle --tasks
-
+ gradle tasks
+
+or if using gradle wrapper
+
+ ./gradlew tasks
+
### Executing Tasks Across All Modules
To execute a task across all modules, simply perform that task from the root directory. Gradle will visit each
diff --git a/build.gradle b/build.gradle
index 610a33bb85..bfc15ca270 100644
--- a/build.gradle
+++ b/build.gradle
@@ -179,6 +179,10 @@ subprojects { subProject ->
systemProperty entry.key, entry.value
}
}
+ }
+ test {
+ systemProperties['hibernate.test.validatefailureexpected'] = true
+ systemProperties += System.properties.findAll { it.key.startsWith( "hibernate.") }
maxHeapSize = "1024m"
}
diff --git a/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy b/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy
index 5cc4cc3988..0b86ce7b0d 100644
--- a/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy
+++ b/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy
@@ -54,7 +54,7 @@ class DatabaseAllocator {
"postgresql82", "postgresql83", "postgresql84", "postgresql91",
"mysql50", "mysql51","mysql55",
"db2-91", "db2-97",
- "mssql2005", "mssql2008R1", "mssql2008R2",
+ "mssql2005", "mssql2008R1", "mssql2008R2", "mssql2012",
"sybase155", "sybase157"
];
@@ -97,4 +97,4 @@ class DatabaseAllocator {
}
return (DatabaseAllocator) project.rootProject.properties[ DB_ALLOCATOR_KEY ];
}
-}
\ No newline at end of file
+}
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/Fetching.xml b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/Fetching.xml
new file mode 100644
index 0000000000..366979eb90
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/Fetching.xml
@@ -0,0 +1,228 @@
+
+
+
+
+ Fetching
+
+
+ Fetching, essentially, is the process of grabbing data from the database and making it available to the
+ application. Tuning how an application does fetching is one of the biggest factors in determining how an
+ application will perform. Fetching too much data, in terms of width (values/columns) and/or
+ depth (results/rows), adds unnecessary overhead in terms of both JDBC communication and ResultSet processing.
+ Fetching too little data causes additional fetches to be needed. Tuning how an application
+ fetches data presents a great opportunity to influence the application's overall performance.
+
+
+
+ The basics
+
+
+ The concept of fetching breaks down into two different questions.
+
+
+
+ When should the data be fetched? Now? Later?
+
+
+
+
+ How should the data be fetched?
+
+
+
+
+
+
+
+ "now" is generally termed eager or immediate. "later" is
+ generally termed lazy or delayed.
+
+
+
+
+ There are a number of scopes for defining fetching:
+
+
+
+ static - Static definition of fetching strategies is done in the
+ mappings. The statically-defined fetch strategy is used in the absence of any dynamically
+ defined strategies, except in the case of HQL/JPQL; see xyz.
+
+
+
+
+ dynamic (sometimes referred to as runtime) - Dynamic definition is
+ really use-case centric. There are 2 main ways to define dynamic fetching:
+
+
+
+
+ fetch profiles - defined in mappings, but can be
+ enabled/disabled on the Session.
+
+
+
+
+ HQL/JPQL and both Hibernate and JPA Criteria queries have the ability to specify
+ fetching, specific to said query.
+
+
+
+
+
+
+
+
+ The strategies
+
+ SELECT
+
+
+ Performs a separate SQL select to load the data. This can either be EAGER (the second select
+ is issued immediately) or LAZY (the second select is delayed until the data is needed). This
+ is the strategy generally termed N+1.
+
+
+
+
+ JOIN
+
+
+ Inherently an EAGER style of fetching. The data to be fetched is obtained through the use of
+ an SQL join.
+
+
+
+
+ BATCH
+
+
+ Performs a separate SQL select to load a number of related data items using an
+ IN-restriction as part of the SQL WHERE-clause based on a batch size. Again, this can either
+ be EAGER (the second select is issued immediately) or LAZY (the second select is delayed until
+ the data is needed).
+
+
+
+
+ SUBSELECT
+
+
+ Performs a separate SQL select to load associated data based on the SQL restriction used to
+ load the owner. Again, this can either be EAGER (the second select is issued immediately)
+ or LAZY (the second select is delayed until the data is needed).
+
+
+
+
+
+
+
+ Applying fetch strategies
+
+
+ Let's consider these topics as they relate to a simple domain model and a few use cases.
+
+
+
+ Sample domain model
+
+
+
+
+
+
+
+ The Hibernate recommendation is to statically mark all associations lazy and to use dynamic fetching
+ strategies for eagerness. This is unfortunately at odds with the JPA specification which defines that
+ all one-to-one and many-to-one associations should be eagerly fetched by default. Hibernate, as a JPA
+ provider, honors that default.
+
+
+
+
+ No fetching
+ The login use-case
+
+ For the first use case, consider the application's login process for an Employee. Let's assume that
+ login only requires access to the Employee information, not Project nor Department information.
+
+
+
+ No fetching example
+
+
+
+
+ In this example, the application gets the Employee data. However, because all associations from
+ Employee are declared as LAZY (JPA defines the default for collections as LAZY) no other data is
+ fetched.
+
+
+
+ If the login process does not need access to the Employee information specifically, another
+ fetching optimization here would be to limit the width of the query results.
+
+
+
+ No fetching (scalar) example
+
+
+
+
+
+ Dynamic fetching via queries
+ The projects for an employee use-case
+
+
+ For the second use case, consider a screen displaying the Projects for an Employee. Certainly access
+ to the Employee is needed, as is the collection of Projects for that Employee. Information
+ about Departments, other Employees or other Projects is not needed.
+
+
+
+ Dynamic query fetching example
+
+
+
+
+
+ In this example we have an Employee and their Projects loaded in a single query shown both as an HQL
+ query and a JPA Criteria query. In both cases, this resolves to exactly one database query to get
+ all that information.
+
+
+
+
+ Dynamic fetching via profiles
+ The projects for an employee use-case using natural-id
+
+
+ Suppose we wanted to leverage loading by natural-id to obtain the Employee information in the
+ "projects for and employee" use-case. Loading by natural-id uses the statically defined fetching
+ strategies, but does not expose a means to define load-specific fetching. So we would leverage a
+ fetch profile.
+
+
+
+ Fetch profile example
+
+
+
+
+
+ Here the Employee is obtained by natural-id lookup and the Employee's Project data is fetched eagerly.
+ If the Employee data is resolved from cache, the Project data is resolved on its own. However,
+ if the Employee data is not resolved in cache, the Employee and Project data is resolved in one
+ SQL query via join as we saw above.
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Department.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Department.java
new file mode 100644
index 0000000000..06eab06a51
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Department.java
@@ -0,0 +1,10 @@
+@Entity
+public class Department {
+ @Id
+ private Long id;
+
+ @OneToMany(mappedBy="department")
+ private List employees;
+
+ ...
+}
\ No newline at end of file
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Employee.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Employee.java
new file mode 100644
index 0000000000..62ed3e54e0
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Employee.java
@@ -0,0 +1,24 @@
+@Entity
+public class Employee {
+ @Id
+ private Long id;
+
+ @NaturalId
+ private String userid;
+
+ @Column( name="pswd" )
+ @ColumnTransformer( read="decrypt(pswd)", write="encrypt(?)" )
+ private String password;
+
+ private int accessLevel;
+
+ @ManyToOne( fetch=LAZY )
+ @JoinColumn
+ private Department department;
+
+ @ManyToMany(mappedBy="employees")
+ @JoinColumn
+ private Set projects;
+
+ ...
+}
\ No newline at end of file
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/FetchOverrides.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/FetchOverrides.java
new file mode 100644
index 0000000000..4144ea27dc
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/FetchOverrides.java
@@ -0,0 +1,10 @@
+@FetchProfile(
+ name="employee.projects",
+ fetchOverrides={
+ @FetchOverride(
+ entity=Employee.class,
+ association="projects",
+ mode=JOIN
+ )
+ }
+)
\ No newline at end of file
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Login.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Login.java
new file mode 100644
index 0000000000..f916bb847a
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Login.java
@@ -0,0 +1,4 @@
+String loginHql = "select e from Employee e where e.userid = :userid and e.password = :password";
+Employee employee = (Employee) session.createQuery( loginHql )
+ ...
+ .uniqueResult();
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/LoginScalar.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/LoginScalar.java
new file mode 100644
index 0000000000..8905b0ce4a
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/LoginScalar.java
@@ -0,0 +1,4 @@
+String loginHql = "select e.accessLevel from Employee e where e.userid = :userid and e.password = :password";
+int accessLevel = (Integer) session.createQuery( loginHql )
+ ...
+ .uniqueResult();
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Project.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Project.java
new file mode 100644
index 0000000000..94fe42c0d5
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Project.java
@@ -0,0 +1,10 @@
+@Entity
+public class Project {
+ @Id
+ private Long id;
+
+ @ManyToMany
+ private Set employees;
+
+ ...
+}
\ No newline at end of file
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeCriteria.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeCriteria.java
new file mode 100644
index 0000000000..384d964e07
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeCriteria.java
@@ -0,0 +1,10 @@
+String userid = ...;
+CriteriaBuilder cb = entityManager.getCriteriaBuilder();
+CriteriaQuery criteria = cb.createQuery( Employee.class );
+Root root = criteria.from( Employee.class );
+root.fetch( Employee_.projects );
+criteria.select( root );
+criteria.where(
+ cb.equal( root.get( Employee_.userid ), cb.literal( userid ) )
+);
+Employee e = entityManager.createQuery( criteria ).getSingleResult();
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeFetchProfile.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeFetchProfile.java
new file mode 100644
index 0000000000..297cb8cfc6
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeFetchProfile.java
@@ -0,0 +1,4 @@
+String userid = ...;
+session.enableFetchProfile( "employee.projects" );
+Employee e = (Employee) session.bySimpleNaturalId( Employee.class )
+ .load( userid );
\ No newline at end of file
diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeHql.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeHql.java
new file mode 100644
index 0000000000..11235281d0
--- /dev/null
+++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeHql.java
@@ -0,0 +1,5 @@
+String userid = ...;
+String hql = "select e from Employee e join fetch e.projects where e.userid = :userid";
+Employee e = (Employee) session.createQuery( hql )
+ .setParameter( "userid", userid )
+ .uniqueResult();
diff --git a/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java b/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java
index 43d9b5acdb..f8917f4653 100644
--- a/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java
+++ b/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java
@@ -72,7 +72,7 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
* @param session The session to which this request is tied.
* @param affectedQueryables The affected entity persisters.
*/
- public BulkOperationCleanupAction(SessionImplementor session, Queryable[] affectedQueryables) {
+ public BulkOperationCleanupAction(SessionImplementor session, Queryable... affectedQueryables) {
SessionFactoryImplementor factory = session.getFactory();
LinkedHashSet spacesList = new LinkedHashSet();
for ( Queryable persister : affectedQueryables ) {
diff --git a/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java b/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java
index 52c4795d92..31c565ce15 100644
--- a/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java
+++ b/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java
@@ -73,7 +73,8 @@ public final class CollectionUpdateAction extends CollectionAction {
if (affectedByFilters) {
throw new HibernateException(
"cannot recreate collection while filter is enabled: " +
- MessageHelper.collectionInfoString( persister, id, persister.getFactory() )
+ MessageHelper.collectionInfoString(persister, collection,
+ id, session )
);
}
if ( !emptySnapshot ) persister.remove( id, session );
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java b/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java
index ddb2703690..af2e72b8ca 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java
@@ -36,9 +36,13 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
* Prefer the standard {@link javax.persistence.Access} annotation
*
* @author Emmanuel Bernard
+ *
+ * @deprecated Use {@link AttributeAccessor} instead; renamed to avoid confusion with the JPA
+ * {@link javax.persistence.AccessType} enum.
*/
@Target({ TYPE, METHOD, FIELD })
@Retention(RUNTIME)
+@Deprecated
public @interface AccessType {
String value();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Any.java b/hibernate-core/src/main/java/org/hibernate/annotations/Any.java
index 8ab0e1a308..7ec5a179f5 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/Any.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/Any.java
@@ -31,11 +31,32 @@ import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Define a ToOne association pointing to several entity types.
- * Matching the according entity type is doe through a metadata discriminator column
- * This kind of mapping should be only marginal.
- *
+ * Defines a ToOne-style association pointing to one of several entity types depending on a local discriminator,
+ * as opposed to discriminated inheritance where the discriminator is kept as part of the entity hierarchy.
+ *
+ * For example, if you consider an Order entity containing Payment information where Payment might be of type
+ * CashPayment or CreditCardPayment the @Any approach would be to keep that discriminator and matching value on the
+ * Order itself. Thought of another way, the "foreign-key" really is made up of the value and discriminator
+ * (there is no physical foreign key here as databases do not support this):
+ *
+ *
* @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
@java.lang.annotation.Target({METHOD, FIELD})
@Retention(RUNTIME)
@@ -48,10 +69,10 @@ public @interface Any {
String metaDef() default "";
/**
- * Metadata discriminator column description, This column will hold the meta value corresponding to the
- * targeted entity.
+ * Identifies the discriminator column. This column will hold the value that identifies the targeted entity.
*/
Column metaColumn();
+
/**
* Defines whether the value of the field or property should be lazily loaded or must be
* eagerly fetched. The EAGER strategy is a requirement on the persistence provider runtime
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java
index cc50fa9425..e7cac8a031 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java
@@ -31,9 +31,12 @@ import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Defines @Any and @manyToAny metadata
+ * Used to provide metadata about an {@link Any} or {@link ManyToAny} mapping.
+ *
+ * @see AnyMetaDefs
*
* @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
@java.lang.annotation.Target( { PACKAGE, TYPE, METHOD, FIELD } )
@Retention( RUNTIME )
@@ -45,18 +48,18 @@ public @interface AnyMetaDef {
String name() default "";
/**
- * meta discriminator Hibernate type
+ * Names the discriminator Hibernate Type for this Any/ManyToAny mapping. The default is to use
+ * {@link org.hibernate.type.StringType}
*/
String metaType();
/**
- * Hibernate type of the id column
- * @return Hibernate type of the id column
+ * Names the identifier Hibernate Type for the entity associated through this Any/ManyToAny mapping.
*/
String idType();
/**
- * Matching discriminator values with their respective entity
+ * Maps discriminator values to the matching corresponding entity types.
*/
MetaValue[] metaValues();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java
index 268418bd0e..0de690efa7 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.annotations;
+
import java.lang.annotation.Retention;
import static java.lang.annotation.ElementType.PACKAGE;
@@ -29,10 +30,10 @@ import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Defines @Any and @ManyToAny set of metadata.
- * Can be defined at the entity level or the package level
+ * Used to group together {@link AnyMetaDef} annotations. Can be defined at the entity or package level
*
* @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
@java.lang.annotation.Target( { PACKAGE, TYPE } )
@Retention( RUNTIME )
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AttributeAccessor.java b/hibernate-core/src/main/java/org/hibernate/annotations/AttributeAccessor.java
new file mode 100644
index 0000000000..32991f4865
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/AttributeAccessor.java
@@ -0,0 +1,61 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.annotations;
+
+import java.lang.annotation.Retention;
+
+import static java.lang.annotation.ElementType.FIELD;
+import static java.lang.annotation.ElementType.METHOD;
+import static java.lang.annotation.ElementType.TYPE;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+/**
+ * Names a {@link org.hibernate.property.PropertyAccessor} strategy to use.
+ *
+ * Can be specified at either:
+ *
+ * TYPE level, which will act as naming the default accessor strategy for
+ * all attributes on the class which do not explicitly name an accessor strategy
+ *
+ *
+ * METHOD/FIELD level, which will be in effect for just that attribute.
+ *
+ *
+ *
+ * Should only be used to name custom {@link org.hibernate.property.PropertyAccessor}. For {@code property/field}
+ * access, the JPA {@link javax.persistence.Access} annotation should be preferred using the appropriate
+ * {@link javax.persistence.AccessType}. However, if this annotation is used with either {@code value="property"}
+ * or {@code value="field"}, it will act just as the corresponding usage of {@link javax.persistence.Access}.
+ *
+ * @author Steve Ebersole
+ * @author Emmanuel Bernard
+ */
+@java.lang.annotation.Target({ TYPE, METHOD, FIELD })
+@Retention(RUNTIME)
+public @interface AttributeAccessor {
+ /**
+ * Names the {@link org.hibernate.property.PropertyAccessor} strategy
+ */
+ String value();
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java b/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java
index 0219574aaf..414af5199c 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.annotations;
+
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
@@ -31,13 +32,31 @@ import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Batch size for SQL loading
+ * Defines size for batch loading of collections or lazy entities. For example...
+ *
+ * will initialize up to 5 lazy collections of products at a time
*
* @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
@Target({TYPE, METHOD, FIELD})
@Retention(RUNTIME)
public @interface BatchSize {
- /** Strictly positive integer */
+ /**
+ * Strictly positive integer
+ */
int size();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java b/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java
index 2e3762292e..406adb7948 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java
@@ -30,7 +30,12 @@ import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Apply a cascade strategy on an association
+ * Apply a cascade strategy on an association. Used to apply Hibernate specific cascades. For JPA cascading, prefer
+ * using {@link javax.persistence.CascadeType} on {@link javax.persistence.OneToOne},
+ * {@link javax.persistence.OneToMany}, etc. Hibernate will merge together both sets of cascades.
+ *
+ * @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
@Target({METHOD, FIELD})
@Retention(RUNTIME)
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java b/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java
index 0cf7b204fd..2767b824c2 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java
@@ -25,7 +25,7 @@ package org.hibernate.annotations;
/**
- * Cascade types (can override default EJB3 cascades
+ * Cascade types (can override default JPA cascades)
*/
public enum CascadeType {
ALL,
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Check.java b/hibernate-core/src/main/java/org/hibernate/annotations/Check.java
index 068722cada..8230af6e35 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/Check.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/Check.java
@@ -32,8 +32,7 @@ import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Arbitrary SQL check constraints which can be defined at the class,
- * property or collection level
+ * Arbitrary SQL CHECK constraints which can be defined at the class, property or collection level
*
* @author Emmanuel Bernard
*/
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java b/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java
index 199ab14ff9..5bbe42d538 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java
@@ -30,7 +30,8 @@ import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Names a custom collection type for a persistent collection.
+ * Names a custom collection type for a persistent collection. The collection can also name a @Type, which defines
+ * the Hibernate Type of the collection elements.
*
* @see org.hibernate.type.CollectionType
* @see org.hibernate.usertype.UserCollectionType
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java b/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java
index 810d2b5dbf..0d9c1c4486 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java
@@ -34,7 +34,9 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
* The write expression must contain exactly one '?' placeholder for the value.
*
* For example: read="decrypt(credit_card_num)" write="encrypt(?)"
- *
+ *
+ * @see ColumnTransformers
+ *
* @author Emmanuel Bernard
*/
@java.lang.annotation.Target({FIELD,METHOD})
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java b/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java
index 6761631fa9..c272298480 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java
@@ -29,11 +29,15 @@ import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Discriminator formula
- * To be placed at the root entity.
+ * Used to apply a Hibernate formula (derived value) as the inheritance discriminator "column". Used in place of
+ * the JPA {@link javax.persistence.DiscriminatorColumn} when a formula is wanted.
+ *
+ * To be placed on the root entity.
+ *
+ * @see Formula
*
* @author Emmanuel Bernard
- * @see Formula
+ * @author Steve Ebersole
*/
@Target({TYPE})
@Retention(RUNTIME)
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java b/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java
index 73a496276e..813d8fd94b 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java
@@ -30,10 +30,34 @@ import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Formula. To be used as a replacement for @Column in most places
- * The formula has to be a valid SQL fragment
+ * Defines a formula (derived value) which is a SQL fragment that acts as a @Column alternative in most cases.
+ * Represents read-only state.
+ *
+ * In certain cases @ColumnTransformer might be a better option, especially as it leaves open the option of still
+ * being writable.
+ *
+ *
+ * // this might be better handled through @ColumnTransformer
+ * @Formula( "decrypt(credit_card_num)" )
+ * String getCreditCardNumber() { ... }
+ *
+ *
+ * @see ColumnTransformer
*
* @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
@Target({METHOD, FIELD})
@Retention(RUNTIME)
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java b/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java
index bb20cc1e00..6fae415eb1 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java
@@ -22,20 +22,23 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.annotations;
-import java.lang.annotation.Retention;
+
import javax.persistence.Column;
import javax.persistence.FetchType;
+import java.lang.annotation.Retention;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
- * Defined a ToMany association pointing to different entity types.
- * Matching the according entity type is doe through a metadata discriminator column
- * This kind of mapping should be only marginal.
+ * This is the collection-valued form of @Any definitions. Defines a ToMany-style association pointing
+ * to one of several entity types depending on a local discriminator. See {@link Any} for further information.
+ *
+ * @see Any
*
* @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
@java.lang.annotation.Target({METHOD, FIELD})
@Retention(RUNTIME)
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java b/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java
index d6fa60c89e..e9428b019d 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java
@@ -23,10 +23,13 @@
*/
package org.hibernate.annotations;
-
/**
- * Represent a discriminator value associated to a given entity type
+ * Maps a given discriminator value to the corresponding entity type. See {@link Any} for more information.
+ *
+ * @see Any
+ *
* @author Emmanuel Bernard
+ * @author Steve Ebersole
*/
public @interface MetaValue {
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java b/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java
index 6db8a97db5..7d9eeeb36e 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java
@@ -92,6 +92,10 @@ public class StandardServiceRegistryBuilder {
return initiators;
}
+ public BootstrapServiceRegistry getBootstrapServiceRegistry() {
+ return bootstrapServiceRegistry;
+ }
+
/**
* Read settings from a {@link Properties} file. Differs from {@link #configure()} and {@link #configure(String)}
* in that here we read a {@link Properties} file while for {@link #configure} we read the XML variant.
@@ -224,6 +228,15 @@ public class StandardServiceRegistryBuilder {
}
}
+ /**
+ * Temporarily exposed since Configuration is still around and much code still uses Configuration. This allows
+ * code to configure the builder and access it to configure the Configuration object (used from HEM atm).
+ */
+ @Deprecated
+ public Map getSettings() {
+ return settings;
+ }
+
/**
* Destroy a service registry. Applications should only destroy registries they have explicitly created.
*
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java b/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java
index 6404478a06..d24b50f3f1 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java
@@ -94,6 +94,9 @@ import org.hibernate.engine.transaction.jta.platform.internal.WebSphereJtaPlatfo
import org.hibernate.engine.transaction.jta.platform.internal.WeblogicJtaPlatform;
import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
import org.hibernate.engine.transaction.spi.TransactionFactory;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
+import org.hibernate.hql.spi.PersistentTableBulkIdStrategy;
+import org.hibernate.hql.spi.TemporaryTableBulkIdStrategy;
/**
* @author Steve Ebersole
@@ -131,6 +134,7 @@ public class StrategySelectorBuilder {
addDialects( strategySelector );
addJtaPlatforms( strategySelector );
addTransactionFactories( strategySelector );
+ addMultiTableBulkIdStrategies( strategySelector );
// apply auto-discovered registrations
for ( AvailabilityAnnouncer announcer : classLoaderService.loadJavaServices( AvailabilityAnnouncer.class ) ) {
@@ -327,4 +331,17 @@ public class StrategySelectorBuilder {
strategySelector.registerStrategyImplementor( TransactionFactory.class, CMTTransactionFactory.SHORT_NAME, CMTTransactionFactory.class );
strategySelector.registerStrategyImplementor( TransactionFactory.class, "org.hibernate.transaction.CMTTransactionFactory", CMTTransactionFactory.class );
}
+
+ private void addMultiTableBulkIdStrategies(StrategySelectorImpl strategySelector) {
+ strategySelector.registerStrategyImplementor(
+ MultiTableBulkIdStrategy.class,
+ PersistentTableBulkIdStrategy.SHORT_NAME,
+ PersistentTableBulkIdStrategy.class
+ );
+ strategySelector.registerStrategyImplementor(
+ MultiTableBulkIdStrategy.class,
+ TemporaryTableBulkIdStrategy.SHORT_NAME,
+ TemporaryTableBulkIdStrategy.class
+ );
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java b/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java
index e3c4bd8513..1182ed9d18 100644
--- a/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java
+++ b/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java
@@ -25,13 +25,9 @@ package org.hibernate.bytecode.buildtime.internal;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileInputStream;
import java.io.IOException;
import java.util.Set;
-import javassist.ClassClassPath;
-import javassist.ClassPool;
import javassist.bytecode.ClassFile;
import org.hibernate.bytecode.buildtime.spi.AbstractInstrumenter;
@@ -48,7 +44,6 @@ import org.hibernate.bytecode.spi.ClassTransformer;
*
* @author Steve Ebersole
* @author Muga Nishizawa
- * @author Dustin Schultz
*/
public class JavassistInstrumenter extends AbstractInstrumenter {
@@ -75,20 +70,6 @@ public class JavassistInstrumenter extends AbstractInstrumenter {
return provider.getTransformer( CLASS_FILTER, new CustomFieldFilter( descriptor, classNames ) );
}
}
-
- @Override
- public void execute(Set files) {
- ClassPool cp = ClassPool.getDefault();
- cp.insertClassPath(new ClassClassPath(this.getClass()));
- try {
- for (File file : files) {
- cp.makeClass(new FileInputStream(file));
- }
- } catch (IOException e) {
- throw new RuntimeException(e.getMessage(), e);
- }
- super.execute(files);
- }
private static class CustomClassDescriptor implements ClassDescriptor {
private final byte[] bytes;
diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java b/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java
index 300238fee4..2e547c3627 100644
--- a/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java
+++ b/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java
@@ -32,7 +32,6 @@ import java.util.Iterator;
import java.util.List;
import javassist.CannotCompileException;
-import javassist.ClassPool;
import javassist.bytecode.AccessFlag;
import javassist.bytecode.BadBytecode;
import javassist.bytecode.Bytecode;
@@ -44,8 +43,6 @@ import javassist.bytecode.Descriptor;
import javassist.bytecode.FieldInfo;
import javassist.bytecode.MethodInfo;
import javassist.bytecode.Opcode;
-import javassist.bytecode.StackMapTable;
-import javassist.bytecode.stackmap.MapMaker;
/**
* The thing that handles actual class enhancement in regards to
@@ -53,7 +50,6 @@ import javassist.bytecode.stackmap.MapMaker;
*
* @author Muga Nishizawa
* @author Steve Ebersole
- * @author Dustin Schultz
*/
public class FieldTransformer {
@@ -134,7 +130,7 @@ public class FieldTransformer {
}
private void addGetFieldHandlerMethod(ClassFile classfile)
- throws CannotCompileException, BadBytecode {
+ throws CannotCompileException {
ConstPool cp = classfile.getConstPool();
int this_class_index = cp.getThisClassInfo();
MethodInfo minfo = new MethodInfo(cp, GETFIELDHANDLER_METHOD_NAME,
@@ -152,13 +148,11 @@ public class FieldTransformer {
code.addOpcode(Opcode.ARETURN);
minfo.setCodeAttribute(code.toCodeAttribute());
minfo.setAccessFlags(AccessFlag.PUBLIC);
- StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo);
- minfo.getCodeAttribute().setAttribute(smt);
classfile.addMethod(minfo);
}
private void addSetFieldHandlerMethod(ClassFile classfile)
- throws CannotCompileException, BadBytecode {
+ throws CannotCompileException {
ConstPool cp = classfile.getConstPool();
int this_class_index = cp.getThisClassInfo();
MethodInfo minfo = new MethodInfo(cp, SETFIELDHANDLER_METHOD_NAME,
@@ -178,8 +172,6 @@ public class FieldTransformer {
code.addOpcode(Opcode.RETURN);
minfo.setCodeAttribute(code.toCodeAttribute());
minfo.setAccessFlags(AccessFlag.PUBLIC);
- StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo);
- minfo.getCodeAttribute().setAttribute(smt);
classfile.addMethod(minfo);
}
@@ -193,7 +185,7 @@ public class FieldTransformer {
}
private void addReadWriteMethods(ClassFile classfile)
- throws CannotCompileException, BadBytecode {
+ throws CannotCompileException {
List fields = classfile.getFields();
for (Iterator field_iter = fields.iterator(); field_iter.hasNext();) {
FieldInfo finfo = (FieldInfo) field_iter.next();
@@ -213,7 +205,7 @@ public class FieldTransformer {
}
private void addReadMethod(ClassFile classfile, FieldInfo finfo)
- throws CannotCompileException, BadBytecode {
+ throws CannotCompileException {
ConstPool cp = classfile.getConstPool();
int this_class_index = cp.getThisClassInfo();
String desc = "()" + finfo.getDescriptor();
@@ -262,13 +254,11 @@ public class FieldTransformer {
minfo.setCodeAttribute(code.toCodeAttribute());
minfo.setAccessFlags(AccessFlag.PUBLIC);
- StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo);
- minfo.getCodeAttribute().setAttribute(smt);
classfile.addMethod(minfo);
}
private void addWriteMethod(ClassFile classfile, FieldInfo finfo)
- throws CannotCompileException, BadBytecode {
+ throws CannotCompileException {
ConstPool cp = classfile.getConstPool();
int this_class_index = cp.getThisClassInfo();
String desc = "(" + finfo.getDescriptor() + ")V";
@@ -330,13 +320,11 @@ public class FieldTransformer {
minfo.setCodeAttribute(code.toCodeAttribute());
minfo.setAccessFlags(AccessFlag.PUBLIC);
- StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo);
- minfo.getCodeAttribute().setAttribute(smt);
classfile.addMethod(minfo);
}
private void transformInvokevirtualsIntoPutAndGetfields(ClassFile classfile)
- throws CannotCompileException, BadBytecode {
+ throws CannotCompileException {
List methods = classfile.getMethods();
for (Iterator method_iter = methods.iterator(); method_iter.hasNext();) {
MethodInfo minfo = (MethodInfo) method_iter.next();
@@ -353,13 +341,15 @@ public class FieldTransformer {
}
CodeIterator iter = codeAttr.iterator();
while (iter.hasNext()) {
- int pos = iter.next();
- pos = transformInvokevirtualsIntoGetfields(classfile, iter, pos);
- pos = transformInvokevirtualsIntoPutfields(classfile, iter, pos);
+ try {
+ int pos = iter.next();
+ pos = transformInvokevirtualsIntoGetfields(classfile, iter, pos);
+ pos = transformInvokevirtualsIntoPutfields(classfile, iter, pos);
+ } catch ( BadBytecode e ){
+ throw new CannotCompileException( e );
+ }
+
}
-
- StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo);
- minfo.getCodeAttribute().setAttribute(smt);
}
}
diff --git a/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java b/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java
index fdcea19270..b5a81b36e3 100644
--- a/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java
+++ b/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java
@@ -64,6 +64,8 @@ public class StandardQueryCache implements QueryCache {
StandardQueryCache.class.getName()
);
+ private static final boolean tracing = LOG.isTraceEnabled();
+
private QueryResultsRegion cacheRegion;
private UpdateTimestampsCache updateTimestampsCache;
@@ -246,7 +248,7 @@ public class StandardQueryCache implements QueryCache {
}
private static void logCachedResultRowDetails(Type[] returnTypes, Object[] tuple) {
- if ( !LOG.isTraceEnabled() ) {
+ if ( !tracing ) {
return;
}
if ( tuple == null ) {
diff --git a/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java b/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java
index b1ee74cf1c..2be27840ce 100644
--- a/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java
+++ b/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java
@@ -26,7 +26,6 @@ package org.hibernate.cache.spi;
import java.io.Serializable;
import java.util.Properties;
import java.util.Set;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.jboss.logging.Logger;
@@ -51,100 +50,96 @@ public class UpdateTimestampsCache {
public static final String REGION_NAME = UpdateTimestampsCache.class.getName();
private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, UpdateTimestampsCache.class.getName() );
- private ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
- private final TimestampsRegion region;
private final SessionFactoryImplementor factory;
+ private final TimestampsRegion region;
public UpdateTimestampsCache(Settings settings, Properties props, final SessionFactoryImplementor factory) throws HibernateException {
this.factory = factory;
- String prefix = settings.getCacheRegionPrefix();
- String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME;
+ final String prefix = settings.getCacheRegionPrefix();
+ final String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME;
+
LOG.startingUpdateTimestampsCache( regionName );
this.region = factory.getServiceRegistry().getService( RegionFactory.class ).buildTimestampsRegion( regionName, props );
}
+
@SuppressWarnings({"UnusedDeclaration"})
- public UpdateTimestampsCache(Settings settings, Properties props)
- throws HibernateException {
- this(settings, props, null);
+ public UpdateTimestampsCache(Settings settings, Properties props) throws HibernateException {
+ this( settings, props, null );
}
@SuppressWarnings({"UnnecessaryBoxing"})
public void preinvalidate(Serializable[] spaces) throws CacheException {
- readWriteLock.writeLock().lock();
+ final boolean debug = LOG.isDebugEnabled();
+ final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled();
- try {
- Long ts = region.nextTimestamp() + region.getTimeout();
- for ( Serializable space : spaces ) {
+ final Long ts = region.nextTimestamp() + region.getTimeout();
+
+ for ( Serializable space : spaces ) {
+ if ( debug ) {
LOG.debugf( "Pre-invalidating space [%s], timestamp: %s", space, ts );
- //put() has nowait semantics, is this really appropriate?
- //note that it needs to be async replication, never local or sync
- region.put( space, ts );
- if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
- factory.getStatisticsImplementor().updateTimestampsCachePut();
- }
}
- }
- finally {
- readWriteLock.writeLock().unlock();
+ //put() has nowait semantics, is this really appropriate?
+ //note that it needs to be async replication, never local or sync
+ region.put( space, ts );
+ if ( stats ) {
+ factory.getStatisticsImplementor().updateTimestampsCachePut();
+ }
}
}
- @SuppressWarnings({"UnnecessaryBoxing"})
+ @SuppressWarnings({"UnnecessaryBoxing"})
public void invalidate(Serializable[] spaces) throws CacheException {
- readWriteLock.writeLock().lock();
+ final boolean debug = LOG.isDebugEnabled();
+ final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled();
- try {
- Long ts = region.nextTimestamp();
- for (Serializable space : spaces) {
+ final Long ts = region.nextTimestamp();
+
+ for (Serializable space : spaces) {
+ if ( debug ) {
LOG.debugf( "Invalidating space [%s], timestamp: %s", space, ts );
- //put() has nowait semantics, is this really appropriate?
- //note that it needs to be async replication, never local or sync
- region.put( space, ts );
- if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
- factory.getStatisticsImplementor().updateTimestampsCachePut();
- }
}
- }
- finally {
- readWriteLock.writeLock().unlock();
+ //put() has nowait semantics, is this really appropriate?
+ //note that it needs to be async replication, never local or sync
+ region.put( space, ts );
+ if ( stats ) {
+ factory.getStatisticsImplementor().updateTimestampsCachePut();
+ }
}
}
@SuppressWarnings({"unchecked", "UnnecessaryUnboxing"})
public boolean isUpToDate(Set spaces, Long timestamp) throws HibernateException {
- readWriteLock.readLock().lock();
+ final boolean debug = LOG.isDebugEnabled();
+ final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled();
- try {
- for ( Serializable space : (Set) spaces ) {
- Long lastUpdate = (Long) region.get( space );
- if ( lastUpdate == null ) {
- if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
- factory.getStatisticsImplementor().updateTimestampsCacheMiss();
- }
- //the last update timestamp was lost from the cache
- //(or there were no updates since startup!)
- //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) );
- //result = false; // safer
+ for ( Serializable space : (Set) spaces ) {
+ Long lastUpdate = (Long) region.get( space );
+ if ( lastUpdate == null ) {
+ if ( stats ) {
+ factory.getStatisticsImplementor().updateTimestampsCacheMiss();
}
- else {
- if ( LOG.isDebugEnabled() ) {
- LOG.debugf(
- "[%s] last update timestamp: %s",
- space,
- lastUpdate + ", result set timestamp: " + timestamp
- );
- }
- if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
- factory.getStatisticsImplementor().updateTimestampsCacheHit();
- }
- if ( lastUpdate >= timestamp ) return false;
+ //the last update timestamp was lost from the cache
+ //(or there were no updates since startup!)
+ //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) );
+ //result = false; // safer
+ }
+ else {
+ if ( debug ) {
+ LOG.debugf(
+ "[%s] last update timestamp: %s",
+ space,
+ lastUpdate + ", result set timestamp: " + timestamp
+ );
+ }
+ if ( stats ) {
+ factory.getStatisticsImplementor().updateTimestampsCacheHit();
+ }
+ if ( lastUpdate >= timestamp ) {
+ return false;
}
}
- return true;
- }
- finally {
- readWriteLock.readLock().unlock();
}
+ return true;
}
public void clear() throws CacheException {
diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java b/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java
index 7d5b8848fe..d71d65d421 100644
--- a/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java
+++ b/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java
@@ -269,7 +269,7 @@ public interface AvailableSettings {
public static final String CURRENT_SESSION_CONTEXT_CLASS = "hibernate.current_session_context_class";
/**
- * Names the implementation of {@link org.hibernate.engine.transaction.spi.TransactionContext} to use for
+ * Names the implementation of {@link org.hibernate.engine.transaction.spi.TransactionFactory} to use for
* creating {@link org.hibernate.Transaction} instances
*/
public static final String TRANSACTION_STRATEGY = "hibernate.transaction.factory_class";
@@ -643,4 +643,13 @@ public interface AvailableSettings {
// todo : add to Environment
String SCHEMA_NAME_RESOLVER = "hibernate.schema_name_resolver";
public static final String ENABLE_LAZY_LOAD_NO_TRANS = "hibernate.enable_lazy_load_no_trans";
+
+ public static final String HQL_BULK_ID_STRATEGY = "hibernate.hql.bulk_id_strategy";
+
+ /**
+ * Names the {@link org.hibernate.loader.BatchFetchStyle} to use. Can specify either the
+ * {@link org.hibernate.loader.BatchFetchStyle} name (case-insensitively), or a
+ * {@link org.hibernate.loader.BatchFetchStyle} instance.
+ */
+ public static final String BATCH_FETCH_STYLE = "hibernate.batch_fetch_style";
}
diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java b/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java
index 5163259131..e361467be3 100644
--- a/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java
+++ b/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java
@@ -2418,7 +2418,9 @@ public class Configuration implements Serializable {
}
public void addSqlFunction(String functionName, SQLFunction function) {
- sqlFunctions.put( functionName, function );
+ // HHH-7721: SQLFunctionRegistry expects all lowercase. Enforce,
+ // just in case a user's custom dialect uses mixed cases.
+ sqlFunctions.put( functionName.toLowerCase(), function );
}
public TypeResolver getTypeResolver() {
diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java b/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java
index d687f9e846..32bc128277 100644
--- a/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java
+++ b/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java
@@ -29,8 +29,10 @@ import org.hibernate.ConnectionReleaseMode;
import org.hibernate.EntityMode;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.cache.spi.QueryCacheFactory;
-import org.hibernate.hql.spi.QueryTranslatorFactory;
import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
+import org.hibernate.hql.spi.QueryTranslatorFactory;
+import org.hibernate.loader.BatchFetchStyle;
import org.hibernate.tuple.entity.EntityTuplizerFactory;
/**
@@ -77,6 +79,7 @@ public final class Settings {
private boolean namedQueryStartupCheckingEnabled;
private EntityTuplizerFactory entityTuplizerFactory;
private boolean checkNullability;
+ private boolean initializeLazyStateOutsideTransactions;
// private ComponentTuplizerFactory componentTuplizerFactory; todo : HHH-3517 and HHH-1907
// private BytecodeProvider bytecodeProvider;
private String importFiles;
@@ -84,6 +87,10 @@ public final class Settings {
private JtaPlatform jtaPlatform;
+ private MultiTableBulkIdStrategy multiTableBulkIdStrategy;
+ private BatchFetchStyle batchFetchStyle;
+
+
/**
* Package protected constructor
*/
@@ -411,4 +418,28 @@ public final class Settings {
void setMultiTenancyStrategy(MultiTenancyStrategy multiTenancyStrategy) {
this.multiTenancyStrategy = multiTenancyStrategy;
}
+
+ public boolean isInitializeLazyStateOutsideTransactionsEnabled() {
+ return initializeLazyStateOutsideTransactions;
+ }
+
+ void setInitializeLazyStateOutsideTransactions(boolean initializeLazyStateOutsideTransactions) {
+ this.initializeLazyStateOutsideTransactions = initializeLazyStateOutsideTransactions;
+ }
+
+ public MultiTableBulkIdStrategy getMultiTableBulkIdStrategy() {
+ return multiTableBulkIdStrategy;
+ }
+
+ void setMultiTableBulkIdStrategy(MultiTableBulkIdStrategy multiTableBulkIdStrategy) {
+ this.multiTableBulkIdStrategy = multiTableBulkIdStrategy;
+ }
+
+ public BatchFetchStyle getBatchFetchStyle() {
+ return batchFetchStyle;
+ }
+
+ void setBatchFetchStyle(BatchFetchStyle batchFetchStyle) {
+ this.batchFetchStyle = batchFetchStyle;
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java b/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java
index 7926e47d60..173733ba4a 100644
--- a/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java
+++ b/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java
@@ -27,30 +27,34 @@ import java.io.Serializable;
import java.util.Map;
import java.util.Properties;
-import org.jboss.logging.Logger;
-
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
import org.hibernate.MultiTenancyStrategy;
+import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
+import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.internal.NoCachingRegionFactory;
import org.hibernate.cache.internal.RegionFactoryInitiator;
import org.hibernate.cache.internal.StandardQueryCacheFactory;
import org.hibernate.cache.spi.QueryCacheFactory;
import org.hibernate.cache.spi.RegionFactory;
+import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
+import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider;
+import org.hibernate.engine.jdbc.env.spi.ExtractedDatabaseMetaData;
import org.hibernate.engine.jdbc.spi.JdbcServices;
+import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
import org.hibernate.engine.transaction.spi.TransactionFactory;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
+import org.hibernate.hql.spi.PersistentTableBulkIdStrategy;
import org.hibernate.hql.spi.QueryTranslatorFactory;
+import org.hibernate.hql.spi.TemporaryTableBulkIdStrategy;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
+import org.hibernate.loader.BatchFetchStyle;
import org.hibernate.service.ServiceRegistry;
-import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
-import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
-import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider;
-import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
-import org.hibernate.engine.jdbc.env.spi.ExtractedDatabaseMetaData;
import org.hibernate.tuple.entity.EntityTuplizerFactory;
+import org.jboss.logging.Logger;
/**
* Reads configuration properties and builds a {@link Settings} instance.
@@ -75,7 +79,7 @@ public class SettingsFactory implements Serializable {
//SessionFactory name:
- String sessionFactoryName = props.getProperty( Environment.SESSION_FACTORY_NAME );
+ String sessionFactoryName = props.getProperty( AvailableSettings.SESSION_FACTORY_NAME );
settings.setSessionFactoryName( sessionFactoryName );
settings.setSessionFactoryNameAlsoJndiName(
ConfigurationHelper.getBoolean( AvailableSettings.SESSION_FACTORY_NAME_IS_JNDI, props, true )
@@ -97,13 +101,25 @@ public class SettingsFactory implements Serializable {
// Transaction settings:
settings.setJtaPlatform( serviceRegistry.getService( JtaPlatform.class ) );
- boolean flushBeforeCompletion = ConfigurationHelper.getBoolean(Environment.FLUSH_BEFORE_COMPLETION, properties);
+ MultiTableBulkIdStrategy multiTableBulkIdStrategy = serviceRegistry.getService( StrategySelector.class )
+ .resolveStrategy(
+ MultiTableBulkIdStrategy.class,
+ properties.getProperty( AvailableSettings.HQL_BULK_ID_STRATEGY )
+ );
+ if ( multiTableBulkIdStrategy == null ) {
+ multiTableBulkIdStrategy = jdbcServices.getDialect().supportsTemporaryTables()
+ ? TemporaryTableBulkIdStrategy.INSTANCE
+ : new PersistentTableBulkIdStrategy();
+ }
+ settings.setMultiTableBulkIdStrategy( multiTableBulkIdStrategy );
+
+ boolean flushBeforeCompletion = ConfigurationHelper.getBoolean(AvailableSettings.FLUSH_BEFORE_COMPLETION, properties);
if ( debugEnabled ) {
LOG.debugf( "Automatic flush during beforeCompletion(): %s", enabledDisabled(flushBeforeCompletion) );
}
settings.setFlushBeforeCompletionEnabled(flushBeforeCompletion);
- boolean autoCloseSession = ConfigurationHelper.getBoolean(Environment.AUTO_CLOSE_SESSION, properties);
+ boolean autoCloseSession = ConfigurationHelper.getBoolean(AvailableSettings.AUTO_CLOSE_SESSION, properties);
if ( debugEnabled ) {
LOG.debugf( "Automatic session close at end of transaction: %s", enabledDisabled(autoCloseSession) );
}
@@ -111,7 +127,7 @@ public class SettingsFactory implements Serializable {
//JDBC and connection settings:
- int batchSize = ConfigurationHelper.getInt(Environment.STATEMENT_BATCH_SIZE, properties, 0);
+ int batchSize = ConfigurationHelper.getInt(AvailableSettings.STATEMENT_BATCH_SIZE, properties, 0);
if ( !meta.supportsBatchUpdates() ) {
batchSize = 0;
}
@@ -120,14 +136,14 @@ public class SettingsFactory implements Serializable {
}
settings.setJdbcBatchSize(batchSize);
- boolean jdbcBatchVersionedData = ConfigurationHelper.getBoolean(Environment.BATCH_VERSIONED_DATA, properties, false);
+ boolean jdbcBatchVersionedData = ConfigurationHelper.getBoolean(AvailableSettings.BATCH_VERSIONED_DATA, properties, false);
if ( batchSize > 0 && debugEnabled ) {
LOG.debugf( "JDBC batch updates for versioned data: %s", enabledDisabled(jdbcBatchVersionedData) );
}
settings.setJdbcBatchVersionedData(jdbcBatchVersionedData);
boolean useScrollableResultSets = ConfigurationHelper.getBoolean(
- Environment.USE_SCROLLABLE_RESULTSET,
+ AvailableSettings.USE_SCROLLABLE_RESULTSET,
properties,
meta.supportsScrollableResults()
);
@@ -136,19 +152,19 @@ public class SettingsFactory implements Serializable {
}
settings.setScrollableResultSetsEnabled(useScrollableResultSets);
- boolean wrapResultSets = ConfigurationHelper.getBoolean(Environment.WRAP_RESULT_SETS, properties, false);
+ boolean wrapResultSets = ConfigurationHelper.getBoolean(AvailableSettings.WRAP_RESULT_SETS, properties, false);
if ( debugEnabled ) {
LOG.debugf( "Wrap result sets: %s", enabledDisabled(wrapResultSets) );
}
settings.setWrapResultSetsEnabled(wrapResultSets);
- boolean useGetGeneratedKeys = ConfigurationHelper.getBoolean(Environment.USE_GET_GENERATED_KEYS, properties, meta.supportsGetGeneratedKeys());
+ boolean useGetGeneratedKeys = ConfigurationHelper.getBoolean(AvailableSettings.USE_GET_GENERATED_KEYS, properties, meta.supportsGetGeneratedKeys());
if ( debugEnabled ) {
LOG.debugf( "JDBC3 getGeneratedKeys(): %s", enabledDisabled(useGetGeneratedKeys) );
}
settings.setGetGeneratedKeysEnabled(useGetGeneratedKeys);
- Integer statementFetchSize = ConfigurationHelper.getInteger(Environment.STATEMENT_FETCH_SIZE, properties);
+ Integer statementFetchSize = ConfigurationHelper.getInteger(AvailableSettings.STATEMENT_FETCH_SIZE, properties);
if ( statementFetchSize != null && debugEnabled ) {
LOG.debugf( "JDBC result set fetch size: %s", statementFetchSize );
}
@@ -160,7 +176,7 @@ public class SettingsFactory implements Serializable {
}
settings.setMultiTenancyStrategy( multiTenancyStrategy );
- String releaseModeName = ConfigurationHelper.getString( Environment.RELEASE_CONNECTIONS, properties, "auto" );
+ String releaseModeName = ConfigurationHelper.getString( AvailableSettings.RELEASE_CONNECTIONS, properties, "auto" );
if ( debugEnabled ) {
LOG.debugf( "Connection release mode: %s", releaseModeName );
}
@@ -183,10 +199,15 @@ public class SettingsFactory implements Serializable {
}
settings.setConnectionReleaseMode( releaseMode );
+ final BatchFetchStyle batchFetchStyle = BatchFetchStyle.interpret( properties.get( AvailableSettings.BATCH_FETCH_STYLE ) );
+ LOG.debugf( "Using BatchFetchStyle : " + batchFetchStyle.name() );
+ settings.setBatchFetchStyle( batchFetchStyle );
+
+
//SQL Generation settings:
- String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
- String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
+ String defaultSchema = properties.getProperty( AvailableSettings.DEFAULT_SCHEMA );
+ String defaultCatalog = properties.getProperty( AvailableSettings.DEFAULT_CATALOG );
if ( defaultSchema != null && debugEnabled ) {
LOG.debugf( "Default schema: %s", defaultSchema );
}
@@ -196,31 +217,31 @@ public class SettingsFactory implements Serializable {
settings.setDefaultSchemaName( defaultSchema );
settings.setDefaultCatalogName( defaultCatalog );
- Integer maxFetchDepth = ConfigurationHelper.getInteger( Environment.MAX_FETCH_DEPTH, properties );
+ Integer maxFetchDepth = ConfigurationHelper.getInteger( AvailableSettings.MAX_FETCH_DEPTH, properties );
if ( maxFetchDepth != null ) {
LOG.debugf( "Maximum outer join fetch depth: %s", maxFetchDepth );
}
settings.setMaximumFetchDepth( maxFetchDepth );
- int batchFetchSize = ConfigurationHelper.getInt(Environment.DEFAULT_BATCH_FETCH_SIZE, properties, 1);
+ int batchFetchSize = ConfigurationHelper.getInt(AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, properties, 1);
if ( debugEnabled ) {
LOG.debugf( "Default batch fetch size: %s", batchFetchSize );
}
settings.setDefaultBatchFetchSize( batchFetchSize );
- boolean comments = ConfigurationHelper.getBoolean( Environment.USE_SQL_COMMENTS, properties );
+ boolean comments = ConfigurationHelper.getBoolean( AvailableSettings.USE_SQL_COMMENTS, properties );
if ( debugEnabled ) {
LOG.debugf( "Generate SQL with comments: %s", enabledDisabled(comments) );
}
settings.setCommentsEnabled( comments );
- boolean orderUpdates = ConfigurationHelper.getBoolean( Environment.ORDER_UPDATES, properties );
+ boolean orderUpdates = ConfigurationHelper.getBoolean( AvailableSettings.ORDER_UPDATES, properties );
if ( debugEnabled ) {
LOG.debugf( "Order SQL updates by primary key: %s", enabledDisabled(orderUpdates) );
}
settings.setOrderUpdatesEnabled( orderUpdates );
- boolean orderInserts = ConfigurationHelper.getBoolean(Environment.ORDER_INSERTS, properties);
+ boolean orderInserts = ConfigurationHelper.getBoolean(AvailableSettings.ORDER_INSERTS, properties);
if ( debugEnabled ) {
LOG.debugf( "Order SQL inserts for batching: %s", enabledDisabled(orderInserts) );
}
@@ -230,13 +251,13 @@ public class SettingsFactory implements Serializable {
settings.setQueryTranslatorFactory( createQueryTranslatorFactory( properties, serviceRegistry ) );
- Map querySubstitutions = ConfigurationHelper.toMap( Environment.QUERY_SUBSTITUTIONS, " ,=;:\n\t\r\f", properties );
+ Map querySubstitutions = ConfigurationHelper.toMap( AvailableSettings.QUERY_SUBSTITUTIONS, " ,=;:\n\t\r\f", properties );
if ( debugEnabled ) {
LOG.debugf( "Query language substitutions: %s", querySubstitutions );
}
settings.setQuerySubstitutions( querySubstitutions );
- boolean jpaqlCompliance = ConfigurationHelper.getBoolean( Environment.JPAQL_STRICT_COMPLIANCE, properties, false );
+ boolean jpaqlCompliance = ConfigurationHelper.getBoolean( AvailableSettings.JPAQL_STRICT_COMPLIANCE, properties, false );
if ( debugEnabled ) {
LOG.debugf( "JPA-QL strict compliance: %s", enabledDisabled(jpaqlCompliance) );
}
@@ -244,13 +265,13 @@ public class SettingsFactory implements Serializable {
// Second-level / query cache:
- boolean useSecondLevelCache = ConfigurationHelper.getBoolean( Environment.USE_SECOND_LEVEL_CACHE, properties, true );
+ boolean useSecondLevelCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_SECOND_LEVEL_CACHE, properties, true );
if ( debugEnabled ) {
LOG.debugf( "Second-level cache: %s", enabledDisabled(useSecondLevelCache) );
}
settings.setSecondLevelCacheEnabled( useSecondLevelCache );
- boolean useQueryCache = ConfigurationHelper.getBoolean(Environment.USE_QUERY_CACHE, properties);
+ boolean useQueryCache = ConfigurationHelper.getBoolean(AvailableSettings.USE_QUERY_CACHE, properties);
if ( debugEnabled ) {
LOG.debugf( "Query cache: %s", enabledDisabled(useQueryCache) );
}
@@ -268,13 +289,13 @@ public class SettingsFactory implements Serializable {
}
settings.setCacheRegionPrefix( prefix );
- boolean useStructuredCacheEntries = ConfigurationHelper.getBoolean( Environment.USE_STRUCTURED_CACHE, properties, false );
+ boolean useStructuredCacheEntries = ConfigurationHelper.getBoolean( AvailableSettings.USE_STRUCTURED_CACHE, properties, false );
if ( debugEnabled ) {
LOG.debugf( "Structured second-level cache entries: %s", enabledDisabled(useStructuredCacheEntries) );
}
settings.setStructuredCacheEntriesEnabled( useStructuredCacheEntries );
- boolean useIdentifierRollback = ConfigurationHelper.getBoolean( Environment.USE_IDENTIFIER_ROLLBACK, properties );
+ boolean useIdentifierRollback = ConfigurationHelper.getBoolean( AvailableSettings.USE_IDENTIFIER_ROLLBACK, properties );
if ( debugEnabled ) {
LOG.debugf( "Deleted entity synthetic identifier rollback: %s", enabledDisabled(useIdentifierRollback) );
}
@@ -282,7 +303,7 @@ public class SettingsFactory implements Serializable {
//Schema export:
- String autoSchemaExport = properties.getProperty( Environment.HBM2DDL_AUTO );
+ String autoSchemaExport = properties.getProperty( AvailableSettings.HBM2DDL_AUTO );
if ( "validate".equals(autoSchemaExport) ) {
settings.setAutoValidateSchema( true );
}
@@ -296,21 +317,21 @@ public class SettingsFactory implements Serializable {
settings.setAutoCreateSchema( true );
settings.setAutoDropSchema( true );
}
- settings.setImportFiles( properties.getProperty( Environment.HBM2DDL_IMPORT_FILES ) );
+ settings.setImportFiles( properties.getProperty( AvailableSettings.HBM2DDL_IMPORT_FILES ) );
- EntityMode defaultEntityMode = EntityMode.parse( properties.getProperty( Environment.DEFAULT_ENTITY_MODE ) );
+ EntityMode defaultEntityMode = EntityMode.parse( properties.getProperty( AvailableSettings.DEFAULT_ENTITY_MODE ) );
if ( debugEnabled ) {
LOG.debugf( "Default entity-mode: %s", defaultEntityMode );
}
settings.setDefaultEntityMode( defaultEntityMode );
- boolean namedQueryChecking = ConfigurationHelper.getBoolean( Environment.QUERY_STARTUP_CHECKING, properties, true );
+ boolean namedQueryChecking = ConfigurationHelper.getBoolean( AvailableSettings.QUERY_STARTUP_CHECKING, properties, true );
if ( debugEnabled ) {
LOG.debugf( "Named query checking : %s", enabledDisabled(namedQueryChecking) );
}
settings.setNamedQueryStartupCheckingEnabled( namedQueryChecking );
- boolean checkNullability = ConfigurationHelper.getBoolean(Environment.CHECK_NULLABILITY, properties, true);
+ boolean checkNullability = ConfigurationHelper.getBoolean(AvailableSettings.CHECK_NULLABILITY, properties, true);
if ( debugEnabled ) {
LOG.debugf( "Check Nullability in Core (should be disabled when Bean Validation is on): %s", enabledDisabled(checkNullability) );
}
@@ -319,11 +340,21 @@ public class SettingsFactory implements Serializable {
// TODO: Does EntityTuplizerFactory really need to be configurable? revisit for HHH-6383
settings.setEntityTuplizerFactory( new EntityTuplizerFactory() );
-// String provider = properties.getProperty( Environment.BYTECODE_PROVIDER );
+// String provider = properties.getProperty( AvailableSettings.BYTECODE_PROVIDER );
// log.info( "Bytecode provider name : " + provider );
// BytecodeProvider bytecodeProvider = buildBytecodeProvider( provider );
// settings.setBytecodeProvider( bytecodeProvider );
+ boolean initializeLazyStateOutsideTransactionsEnabled = ConfigurationHelper.getBoolean(
+ AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS,
+ properties,
+ false
+ );
+ if ( debugEnabled ) {
+ LOG.debugf( "Allow initialization of lazy state outside session : %s", enabledDisabled( initializeLazyStateOutsideTransactionsEnabled ) );
+ }
+ settings.setInitializeLazyStateOutsideTransactions( initializeLazyStateOutsideTransactionsEnabled );
+
return settings;
}
@@ -344,7 +375,7 @@ public class SettingsFactory implements Serializable {
protected QueryCacheFactory createQueryCacheFactory(Properties properties, ServiceRegistry serviceRegistry) {
String queryCacheFactoryClassName = ConfigurationHelper.getString(
- Environment.QUERY_CACHE_FACTORY, properties, StandardQueryCacheFactory.class.getName()
+ AvailableSettings.QUERY_CACHE_FACTORY, properties, StandardQueryCacheFactory.class.getName()
);
LOG.debugf( "Query cache factory: %s", queryCacheFactoryClassName );
try {
@@ -362,7 +393,7 @@ public class SettingsFactory implements Serializable {
// todo : REMOVE! THIS IS TOTALLY A TEMPORARY HACK FOR org.hibernate.cfg.AnnotationBinder which will be going away
String regionFactoryClassName = RegionFactoryInitiator.mapLegacyNames(
ConfigurationHelper.getString(
- Environment.CACHE_REGION_FACTORY, properties, null
+ AvailableSettings.CACHE_REGION_FACTORY, properties, null
)
);
if ( regionFactoryClassName == null ) {
@@ -392,7 +423,7 @@ public class SettingsFactory implements Serializable {
protected QueryTranslatorFactory createQueryTranslatorFactory(Properties properties, ServiceRegistry serviceRegistry) {
String className = ConfigurationHelper.getString(
- Environment.QUERY_TRANSLATOR, properties, "org.hibernate.hql.internal.ast.ASTQueryTranslatorFactory"
+ AvailableSettings.QUERY_TRANSLATOR, properties, "org.hibernate.hql.internal.ast.ASTQueryTranslatorFactory"
);
LOG.debugf( "Query translator: %s", className );
try {
diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java
index e0d64516fc..7d8b4bad2d 100644
--- a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java
+++ b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java
@@ -795,7 +795,7 @@ public abstract class CollectionBinder {
String entityName = oneToMany.getReferencedEntityName();
PersistentClass referenced = mappings.getClass( entityName );
Backref prop = new Backref();
- prop.setName( '_' + fkJoinColumns[0].getPropertyName() + "Backref" );
+ prop.setName( '_' + fkJoinColumns[0].getPropertyName() + '_' + fkJoinColumns[0].getLogicalColumnName() + "Backref" );
prop.setUpdateable( false );
prop.setSelectable( false );
prop.setCollectionRole( collection.getRole() );
diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java
index 27e9235a89..0aa513d6b3 100644
--- a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java
+++ b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java
@@ -24,10 +24,10 @@
package org.hibernate.cfg.annotations;
import java.util.Map;
+
import javax.persistence.EmbeddedId;
import javax.persistence.Id;
-
-import org.jboss.logging.Logger;
+import javax.persistence.Lob;
import org.hibernate.AnnotationException;
import org.hibernate.annotations.Generated;
@@ -57,6 +57,7 @@ import org.hibernate.mapping.RootClass;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.ToOne;
import org.hibernate.mapping.Value;
+import org.jboss.logging.Logger;
/**
* @author Emmanuel Bernard
@@ -264,6 +265,7 @@ public class PropertyBinder {
prop.setLazy( lazy );
prop.setCascade( cascade );
prop.setPropertyAccessorName( accessType.getType() );
+
Generated ann = property != null ?
property.getAnnotation( Generated.class ) :
null;
@@ -286,6 +288,7 @@ public class PropertyBinder {
prop.setGeneration( PropertyGeneration.parse( generated.toString().toLowerCase() ) );
}
}
+
NaturalId naturalId = property != null ? property.getAnnotation( NaturalId.class ) : null;
if ( naturalId != null ) {
if ( ! entityBinder.isRootEntity() ) {
@@ -296,6 +299,11 @@ public class PropertyBinder {
}
prop.setNaturalIdentifier( true );
}
+
+ // HHH-4635 -- needed for dialect-specific property ordering
+ Lob lob = property != null ? property.getAnnotation( Lob.class ) : null;
+ prop.setLob( lob != null );
+
prop.setInsertable( insertable );
prop.setUpdateable( updatable );
diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java
index e8c46256ef..d4b77da49a 100644
--- a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java
+++ b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java
@@ -28,6 +28,7 @@ import java.lang.reflect.TypeVariable;
import java.util.Calendar;
import java.util.Date;
import java.util.Properties;
+
import javax.persistence.AttributeConverter;
import javax.persistence.Convert;
import javax.persistence.Converts;
@@ -227,7 +228,6 @@ public class SimpleValueBinder {
.toXClass( Serializable.class )
.isAssignableFrom( returnedClassOrElement ) ) {
type = SerializableToBlobType.class.getName();
- //typeParameters = new Properties();
typeParameters.setProperty(
SerializableToBlobType.CLASS_NAME,
returnedClassOrElement.getName()
@@ -618,6 +618,7 @@ public class SimpleValueBinder {
parameters.put( DynamicParameterizedType.IS_PRIMARY_KEY, Boolean.toString( key ) );
parameters.put( DynamicParameterizedType.ENTITY, persistentClassName );
+ parameters.put( DynamicParameterizedType.XPROPERTY, xproperty );
parameters.put( DynamicParameterizedType.PROPERTY, xproperty.getName() );
parameters.put( DynamicParameterizedType.ACCESS_TYPE, accessType.getType() );
simpleValue.setTypeParameters( parameters );
diff --git a/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java b/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java
index 2038ef3d2e..05ce17b45e 100644
--- a/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java
+++ b/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java
@@ -33,13 +33,12 @@ import java.util.List;
import java.util.ListIterator;
import javax.naming.NamingException;
-import org.jboss.logging.Logger;
+import javax.naming.NamingException;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.LazyInitializationException;
import org.hibernate.Session;
-import org.hibernate.cfg.AvailableSettings;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.internal.ForeignKeys;
import org.hibernate.engine.spi.CollectionEntry;
@@ -56,6 +55,7 @@ import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.Type;
+import org.jboss.logging.Logger;
/**
* Base class implementing {@link org.hibernate.collection.spi.PersistentCollection}
@@ -140,16 +140,22 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
@Override
public Boolean doWork() {
CollectionEntry entry = session.getPersistenceContext().getCollectionEntry( AbstractPersistentCollection.this );
- CollectionPersister persister = entry.getLoadedPersister();
- if ( persister.isExtraLazy() ) {
- if ( hasQueuedOperations() ) {
- session.flush();
+
+ if ( entry != null ) {
+ CollectionPersister persister = entry.getLoadedPersister();
+ if ( persister.isExtraLazy() ) {
+ if ( hasQueuedOperations() ) {
+ session.flush();
+ }
+ cachedSize = persister.getSize( entry.getLoadedKey(), session );
+ return true;
+ }
+ else {
+ read();
}
- cachedSize = persister.getSize( entry.getLoadedKey(), session );
- return true;
}
- else {
- read();
+ else {
+ throwLazyInitializationExceptionIfNotConnected();
}
return false;
}
@@ -170,6 +176,7 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
private T withTemporarySessionIfNeeded(LazyInitializationWork lazyInitializationWork) {
SessionImplementor originalSession = null;
boolean isTempSession = false;
+ boolean isJTA = false;
if ( session == null ) {
if ( specjLazyLoad ) {
@@ -202,6 +209,22 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
}
if ( isTempSession ) {
+ // TODO: On the next major release, add an
+ // 'isJTA' or 'getTransactionFactory' method to Session.
+ isJTA = session.getTransactionCoordinator()
+ .getTransactionContext().getTransactionEnvironment()
+ .getTransactionFactory()
+ .compatibleWithJtaSynchronization();
+
+ if ( !isJTA ) {
+ // Explicitly handle the transactions only if we're not in
+ // a JTA environment. A lazy loading temporary session can
+ // be created even if a current session and transaction are
+ // open (ex: session.clear() was used). We must prevent
+ // multiple transactions.
+ ( (Session) session ).beginTransaction();
+ }
+
session.getPersistenceContext().addUninitializedDetachedCollection(
session.getFactory().getCollectionPersister( getRole() ),
this
@@ -215,6 +238,9 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
if ( isTempSession ) {
// make sure the just opened temp session gets closed!
try {
+ if ( !isJTA ) {
+ ( (Session) session ).getTransaction().commit();
+ }
( (Session) session ).close();
}
catch (Exception e) {
@@ -580,11 +606,7 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
protected void prepareForPossibleSpecialSpecjInitialization() {
if ( session != null ) {
- specjLazyLoad = Boolean.parseBoolean(
- session.getFactory()
- .getProperties()
- .getProperty( AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS )
- );
+ specjLazyLoad = session.getFactory().getSettings().isInitializeLazyStateOutsideTransactionsEnabled();
if ( specjLazyLoad && sessionFactoryUuid == null ) {
try {
@@ -622,9 +644,8 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
throw new HibernateException(
"Illegal attempt to associate a collection with two open sessions: " +
MessageHelper.collectionInfoString(
- ce.getLoadedPersister(),
- ce.getLoadedKey(),
- session.getFactory()
+ ce.getLoadedPersister(), this,
+ ce.getLoadedKey(), session
)
);
}
diff --git a/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java b/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java
index 02ea3a6f4e..3419e783c6 100644
--- a/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java
+++ b/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java
@@ -296,6 +296,7 @@ public class PersistentMap extends AbstractPersistentCollection implements Map {
for ( Object[] entry : loadingEntries ) {
map.put( entry[0], entry[1] );
}
+ loadingEntries = null;
}
return super.endRead();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java
index c9b4291dd1..85aacdef71 100755
--- a/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java
@@ -31,6 +31,9 @@ import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.function.VarArgsSQLFunction;
+import org.hibernate.dialect.pagination.LimitHandler;
+import org.hibernate.dialect.pagination.CUBRIDLimitHandler;
+import org.hibernate.engine.spi.RowSelection;
import org.hibernate.type.StandardBasicTypes;
/**
@@ -39,94 +42,220 @@ import org.hibernate.type.StandardBasicTypes;
* @author Seok Jeong Il
*/
public class CUBRIDDialect extends Dialect {
- @Override
- protected String getIdentityColumnString() throws MappingException {
- return "auto_increment"; //starts with 1, implicitly
- }
-
- @Override
- public String getIdentitySelectString(String table, String column, int type)
- throws MappingException {
- // CUBRID 8.4.0 support last_insert_id()
- // return "select last_insert_id()";
- return "select current_val from db_serial where name = '" + ( table + "_ai_" + column ).toLowerCase() + "'";
- }
-
public CUBRIDDialect() {
super();
- registerColumnType( Types.BIT, "bit(8)" );
- registerColumnType( Types.BIGINT, "numeric(19,0)" );
+ registerColumnType( Types.BIGINT, "bigint" );
+ registerColumnType( Types.BIT, "bit(8)" );
+ registerColumnType( Types.BLOB, "bit varying(65535)" );
+ registerColumnType( Types.BOOLEAN, "bit(8)");
+ registerColumnType( Types.CHAR, "char(1)" );
+ registerColumnType( Types.CLOB, "string" );
+ registerColumnType( Types.DATE, "date" );
+ registerColumnType( Types.DECIMAL, "decimal" );
+ registerColumnType( Types.DOUBLE, "double" );
+ registerColumnType( Types.FLOAT, "float" );
+ registerColumnType( Types.INTEGER, "int" );
+ registerColumnType( Types.NUMERIC, "numeric($p,$s)" );
+ registerColumnType( Types.REAL, "double" );
registerColumnType( Types.SMALLINT, "short" );
- registerColumnType( Types.TINYINT, "short" );
- registerColumnType( Types.INTEGER, "integer" );
- registerColumnType( Types.CHAR, "char(1)" );
- registerColumnType( Types.VARCHAR, 4000, "varchar($l)" );
- registerColumnType( Types.FLOAT, "float" );
- registerColumnType( Types.DOUBLE, "double" );
- registerColumnType( Types.DATE, "date" );
- registerColumnType( Types.TIME, "time" );
+ registerColumnType( Types.TIME, "time" );
registerColumnType( Types.TIMESTAMP, "timestamp" );
- registerColumnType( Types.VARBINARY, 2000, "bit varying($l)" );
- registerColumnType( Types.NUMERIC, "numeric($p,$s)" );
- registerColumnType( Types.BLOB, "blob" );
- registerColumnType( Types.CLOB, "string" );
+ registerColumnType( Types.TINYINT, "short" );
+ registerColumnType( Types.VARBINARY, 2000, "bit varying($l)" );
+ registerColumnType( Types.VARCHAR, "string" );
+ registerColumnType( Types.VARCHAR, 2000, "varchar($l)" );
+ registerColumnType( Types.VARCHAR, 255, "varchar($l)" );
- getDefaultProperties().setProperty( Environment.USE_STREAMS_FOR_BINARY, "true" );
- getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE );
+ getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true");
+ getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE);
- registerFunction( "substring", new StandardSQLFunction( "substr", StandardBasicTypes.STRING ) );
- registerFunction( "trim", new StandardSQLFunction( "trim" ) );
- registerFunction( "length", new StandardSQLFunction( "length", StandardBasicTypes.INTEGER ) );
- registerFunction( "bit_length", new StandardSQLFunction( "bit_length", StandardBasicTypes.INTEGER ) );
- registerFunction( "coalesce", new StandardSQLFunction( "coalesce" ) );
- registerFunction( "nullif", new StandardSQLFunction( "nullif" ) );
- registerFunction( "abs", new StandardSQLFunction( "abs" ) );
- registerFunction( "mod", new StandardSQLFunction( "mod" ) );
- registerFunction( "upper", new StandardSQLFunction( "upper" ) );
- registerFunction( "lower", new StandardSQLFunction( "lower" ) );
+ registerFunction("ascii", new StandardSQLFunction("ascii", StandardBasicTypes.INTEGER) );
+ registerFunction("bin", new StandardSQLFunction("bin", StandardBasicTypes.STRING) );
+ registerFunction("char_length", new StandardSQLFunction("char_length", StandardBasicTypes.LONG) );
+ registerFunction("character_length", new StandardSQLFunction("character_length", StandardBasicTypes.LONG) );
+ registerFunction("lengthb", new StandardSQLFunction("lengthb", StandardBasicTypes.LONG) );
+ registerFunction("lengthh", new StandardSQLFunction("lengthh", StandardBasicTypes.LONG) );
+ registerFunction("lcase", new StandardSQLFunction("lcase") );
+ registerFunction("lower", new StandardSQLFunction("lower") );
+ registerFunction("ltrim", new StandardSQLFunction("ltrim") );
+ registerFunction("reverse", new StandardSQLFunction("reverse") );
+ registerFunction("rtrim", new StandardSQLFunction("rtrim") );
+ registerFunction("trim", new StandardSQLFunction("trim") );
+ registerFunction("space", new StandardSQLFunction("space", StandardBasicTypes.STRING) );
+ registerFunction("ucase", new StandardSQLFunction("ucase") );
+ registerFunction("upper", new StandardSQLFunction("upper") );
- registerFunction( "power", new StandardSQLFunction( "power" ) );
- registerFunction( "stddev", new StandardSQLFunction( "stddev" ) );
- registerFunction( "variance", new StandardSQLFunction( "variance" ) );
- registerFunction( "round", new StandardSQLFunction( "round" ) );
- registerFunction( "trunc", new StandardSQLFunction( "trunc" ) );
- registerFunction( "ceil", new StandardSQLFunction( "ceil" ) );
- registerFunction( "floor", new StandardSQLFunction( "floor" ) );
- registerFunction( "ltrim", new StandardSQLFunction( "ltrim" ) );
- registerFunction( "rtrim", new StandardSQLFunction( "rtrim" ) );
- registerFunction( "nvl", new StandardSQLFunction( "nvl" ) );
- registerFunction( "nvl2", new StandardSQLFunction( "nvl2" ) );
- registerFunction( "sign", new StandardSQLFunction( "sign", StandardBasicTypes.INTEGER ) );
- registerFunction( "chr", new StandardSQLFunction( "chr", StandardBasicTypes.CHARACTER ) );
- registerFunction( "to_char", new StandardSQLFunction( "to_char", StandardBasicTypes.STRING ) );
- registerFunction( "to_date", new StandardSQLFunction( "to_date", StandardBasicTypes.TIMESTAMP ) );
- registerFunction( "last_day", new StandardSQLFunction( "last_day", StandardBasicTypes.DATE ) );
- registerFunction( "instr", new StandardSQLFunction( "instr", StandardBasicTypes.INTEGER ) );
- registerFunction( "instrb", new StandardSQLFunction( "instrb", StandardBasicTypes.INTEGER ) );
- registerFunction( "lpad", new StandardSQLFunction( "lpad", StandardBasicTypes.STRING ) );
- registerFunction( "replace", new StandardSQLFunction( "replace", StandardBasicTypes.STRING ) );
- registerFunction( "rpad", new StandardSQLFunction( "rpad", StandardBasicTypes.STRING ) );
- registerFunction( "substr", new StandardSQLFunction( "substr", StandardBasicTypes.STRING ) );
- registerFunction( "substrb", new StandardSQLFunction( "substrb", StandardBasicTypes.STRING ) );
- registerFunction( "translate", new StandardSQLFunction( "translate", StandardBasicTypes.STRING ) );
- registerFunction( "add_months", new StandardSQLFunction( "add_months", StandardBasicTypes.DATE ) );
- registerFunction( "months_between", new StandardSQLFunction( "months_between", StandardBasicTypes.FLOAT ) );
+ registerFunction("abs", new StandardSQLFunction("abs") );
+ registerFunction("sign", new StandardSQLFunction("sign", StandardBasicTypes.INTEGER) );
- registerFunction( "current_date", new NoArgSQLFunction( "current_date", StandardBasicTypes.DATE, false ) );
- registerFunction( "current_time", new NoArgSQLFunction( "current_time", StandardBasicTypes.TIME, false ) );
- registerFunction(
- "current_timestamp",
- new NoArgSQLFunction( "current_timestamp", StandardBasicTypes.TIMESTAMP, false )
- );
- registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", StandardBasicTypes.DATE, false ) );
- registerFunction( "systime", new NoArgSQLFunction( "systime", StandardBasicTypes.TIME, false ) );
- registerFunction( "systimestamp", new NoArgSQLFunction( "systimestamp", StandardBasicTypes.TIMESTAMP, false ) );
- registerFunction( "user", new NoArgSQLFunction( "user", StandardBasicTypes.STRING, false ) );
- registerFunction( "rownum", new NoArgSQLFunction( "rownum", StandardBasicTypes.LONG, false ) );
- registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "", "||", "" ) );
+ registerFunction("acos", new StandardSQLFunction("acos", StandardBasicTypes.DOUBLE) );
+ registerFunction("asin", new StandardSQLFunction("asin", StandardBasicTypes.DOUBLE) );
+ registerFunction("atan", new StandardSQLFunction("atan", StandardBasicTypes.DOUBLE) );
+ registerFunction("cos", new StandardSQLFunction("cos", StandardBasicTypes.DOUBLE) );
+ registerFunction("cot", new StandardSQLFunction("cot", StandardBasicTypes.DOUBLE) );
+ registerFunction("exp", new StandardSQLFunction("exp", StandardBasicTypes.DOUBLE) );
+ registerFunction("ln", new StandardSQLFunction("ln", StandardBasicTypes.DOUBLE) );
+ registerFunction("log2", new StandardSQLFunction("log2", StandardBasicTypes.DOUBLE) );
+ registerFunction("log10", new StandardSQLFunction("log10", StandardBasicTypes.DOUBLE) );
+ registerFunction("pi", new NoArgSQLFunction("pi", StandardBasicTypes.DOUBLE) );
+ registerFunction("rand", new NoArgSQLFunction("rand", StandardBasicTypes.DOUBLE) );
+ registerFunction("random", new NoArgSQLFunction("random", StandardBasicTypes.DOUBLE) );
+ registerFunction("sin", new StandardSQLFunction("sin", StandardBasicTypes.DOUBLE) );
+ registerFunction("sqrt", new StandardSQLFunction("sqrt", StandardBasicTypes.DOUBLE) );
+ registerFunction("tan", new StandardSQLFunction("tan", StandardBasicTypes.DOUBLE) );
+
+ registerFunction("radians", new StandardSQLFunction("radians", StandardBasicTypes.DOUBLE) );
+ registerFunction("degrees", new StandardSQLFunction("degrees", StandardBasicTypes.DOUBLE) );
+
+ registerFunction("ceil", new StandardSQLFunction("ceil", StandardBasicTypes.INTEGER) );
+ registerFunction("floor", new StandardSQLFunction("floor", StandardBasicTypes.INTEGER) );
+ registerFunction("round", new StandardSQLFunction("round") );
+
+ registerFunction("datediff", new StandardSQLFunction("datediff", StandardBasicTypes.INTEGER) );
+ registerFunction("timediff", new StandardSQLFunction("timediff", StandardBasicTypes.TIME) );
+
+ registerFunction("date", new StandardSQLFunction("date", StandardBasicTypes.DATE) );
+ registerFunction("curdate", new NoArgSQLFunction("curdate", StandardBasicTypes.DATE) );
+ registerFunction("current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false) );
+ registerFunction("sys_date", new NoArgSQLFunction("sys_date", StandardBasicTypes.DATE, false) );
+ registerFunction("sysdate", new NoArgSQLFunction("sysdate", StandardBasicTypes.DATE, false) );
+
+ registerFunction("time", new StandardSQLFunction("time", StandardBasicTypes.TIME) );
+ registerFunction("curtime", new NoArgSQLFunction("curtime", StandardBasicTypes.TIME) );
+ registerFunction("current_time", new NoArgSQLFunction("current_time", StandardBasicTypes.TIME, false) );
+ registerFunction("sys_time", new NoArgSQLFunction("sys_time", StandardBasicTypes.TIME, false) );
+ registerFunction("systime", new NoArgSQLFunction("systime", StandardBasicTypes.TIME, false) );
+
+ registerFunction("timestamp", new StandardSQLFunction("timestamp", StandardBasicTypes.TIMESTAMP) );
+ registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIMESTAMP, false) );
+ registerFunction("sys_timestamp", new NoArgSQLFunction("sys_timestamp", StandardBasicTypes.TIMESTAMP, false) );
+ registerFunction("systimestamp", new NoArgSQLFunction("systimestamp", StandardBasicTypes.TIMESTAMP, false) );
+ registerFunction("localtime", new NoArgSQLFunction("localtime", StandardBasicTypes.TIMESTAMP, false) );
+ registerFunction("localtimestamp", new NoArgSQLFunction("localtimestamp", StandardBasicTypes.TIMESTAMP, false) );
+
+ registerFunction("day", new StandardSQLFunction("day", StandardBasicTypes.INTEGER) );
+ registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth", StandardBasicTypes.INTEGER) );
+ registerFunction("dayofweek", new StandardSQLFunction("dayofweek", StandardBasicTypes.INTEGER) );
+ registerFunction("dayofyear", new StandardSQLFunction("dayofyear", StandardBasicTypes.INTEGER) );
+ registerFunction("from_days", new StandardSQLFunction("from_days", StandardBasicTypes.DATE) );
+ registerFunction("from_unixtime", new StandardSQLFunction("from_unixtime", StandardBasicTypes.TIMESTAMP) );
+ registerFunction("last_day", new StandardSQLFunction("last_day", StandardBasicTypes.DATE) );
+ registerFunction("minute", new StandardSQLFunction("minute", StandardBasicTypes.INTEGER) );
+ registerFunction("month", new StandardSQLFunction("month", StandardBasicTypes.INTEGER) );
+ registerFunction("months_between", new StandardSQLFunction("months_between", StandardBasicTypes.DOUBLE) );
+ registerFunction("now", new NoArgSQLFunction("now", StandardBasicTypes.TIMESTAMP) );
+ registerFunction("quarter", new StandardSQLFunction("quarter", StandardBasicTypes.INTEGER) );
+ registerFunction("second", new StandardSQLFunction("second", StandardBasicTypes.INTEGER) );
+ registerFunction("sec_to_time", new StandardSQLFunction("sec_to_time", StandardBasicTypes.TIME) );
+ registerFunction("time_to_sec", new StandardSQLFunction("time_to_sec", StandardBasicTypes.INTEGER) );
+ registerFunction("to_days", new StandardSQLFunction("to_days", StandardBasicTypes.LONG) );
+ registerFunction("unix_timestamp", new StandardSQLFunction("unix_timestamp", StandardBasicTypes.LONG) );
+ registerFunction("utc_date", new NoArgSQLFunction("utc_date", StandardBasicTypes.STRING) );
+ registerFunction("utc_time", new NoArgSQLFunction("utc_time", StandardBasicTypes.STRING) );
+ registerFunction("week", new StandardSQLFunction("week", StandardBasicTypes.INTEGER) );
+ registerFunction("weekday", new StandardSQLFunction("weekday", StandardBasicTypes.INTEGER) );
+ registerFunction("year", new StandardSQLFunction("year", StandardBasicTypes.INTEGER) );
+
+ registerFunction("hex", new StandardSQLFunction("hex", StandardBasicTypes.STRING) );
+
+ registerFunction("octet_length", new StandardSQLFunction("octet_length", StandardBasicTypes.LONG) );
+ registerFunction("bit_length", new StandardSQLFunction("bit_length", StandardBasicTypes.LONG) );
+
+ registerFunction("bit_count", new StandardSQLFunction("bit_count", StandardBasicTypes.LONG) );
+ registerFunction("md5", new StandardSQLFunction("md5", StandardBasicTypes.STRING) );
+
+ registerFunction( "concat", new StandardSQLFunction( "concat", StandardBasicTypes.STRING ) );
+
+ registerFunction("substring", new StandardSQLFunction("substring", StandardBasicTypes.STRING) );
+ registerFunction("substr", new StandardSQLFunction("substr", StandardBasicTypes.STRING) );
+
+ registerFunction("length", new StandardSQLFunction("length", StandardBasicTypes.INTEGER) );
+ registerFunction("bit_length",new StandardSQLFunction("bit_length", StandardBasicTypes.INTEGER) );
+ registerFunction("coalesce", new StandardSQLFunction("coalesce") );
+ registerFunction("nullif", new StandardSQLFunction("nullif") );
+ registerFunction("mod", new StandardSQLFunction("mod") );
+
+ registerFunction("power", new StandardSQLFunction("power") );
+ registerFunction("stddev", new StandardSQLFunction("stddev") );
+ registerFunction("variance", new StandardSQLFunction("variance") );
+ registerFunction("trunc", new StandardSQLFunction("trunc") );
+ registerFunction("nvl", new StandardSQLFunction("nvl") );
+ registerFunction("nvl2", new StandardSQLFunction("nvl2") );
+ registerFunction("chr", new StandardSQLFunction("chr", StandardBasicTypes.CHARACTER));
+ registerFunction("to_char", new StandardSQLFunction("to_char", StandardBasicTypes.STRING) );
+ registerFunction("to_date", new StandardSQLFunction("to_date", StandardBasicTypes.TIMESTAMP));
+ registerFunction("instr", new StandardSQLFunction("instr", StandardBasicTypes.INTEGER) );
+ registerFunction("instrb", new StandardSQLFunction("instrb", StandardBasicTypes.INTEGER) );
+ registerFunction("lpad", new StandardSQLFunction("lpad", StandardBasicTypes.STRING) );
+ registerFunction("replace", new StandardSQLFunction("replace", StandardBasicTypes.STRING) );
+ registerFunction("rpad", new StandardSQLFunction("rpad", StandardBasicTypes.STRING) );
+ registerFunction("translate", new StandardSQLFunction("translate", StandardBasicTypes.STRING) );
+
+ registerFunction("add_months", new StandardSQLFunction("add_months", StandardBasicTypes.DATE) );
+ registerFunction("user", new NoArgSQLFunction("user", StandardBasicTypes.STRING, false) );
+ registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.LONG, false) );
+ registerFunction("concat", new VarArgsSQLFunction(StandardBasicTypes.STRING, "", "||", ""));
+
+ registerKeyword( "TYPE" );
+ registerKeyword( "YEAR" );
+ registerKeyword( "MONTH" );
+ registerKeyword( "ALIAS" );
+ registerKeyword( "VALUE" );
+ registerKeyword( "FIRST" );
+ registerKeyword( "ROLE" );
+ registerKeyword( "CLASS" );
+ registerKeyword( "BIT" );
+ registerKeyword( "TIME" );
+ registerKeyword( "QUERY" );
+ registerKeyword( "DATE" );
+ registerKeyword( "USER" );
+ registerKeyword( "ACTION" );
+ registerKeyword( "SYS_USER" );
+ registerKeyword( "ZONE" );
+ registerKeyword( "LANGUAGE" );
+ registerKeyword( "DICTIONARY" );
+ registerKeyword( "DATA" );
+ registerKeyword( "TEST" );
+ registerKeyword( "SUPERCLASS" );
+ registerKeyword( "SECTION" );
+ registerKeyword( "LOWER" );
+ registerKeyword( "LIST" );
+ registerKeyword( "OID" );
+ registerKeyword( "DAY" );
+ registerKeyword( "IF" );
+ registerKeyword( "ATTRIBUTE" );
+ registerKeyword( "STRING" );
+ registerKeyword( "SEARCH" );
+ }
+
+ public boolean supportsIdentityColumns() {
+ return true;
+ }
+
+ public String getIdentityInsertString() {
+ return "NULL";
+ }
+
+ public boolean supportsColumnCheck() {
+ return false;
+ }
+
+ public boolean supportsPooledSequences() {
+ return true;
+ }
+
+ public String getIdentitySelectString() {
+ return "select last_insert_id()";
}
+ protected String getIdentityColumnString() {
+ return "not null auto_increment"; //starts with 1, implicitly
+ }
+
+ /*
+ * CUBRID supports "ADD [COLUMN | ATTRIBUTE]"
+ */
public String getAddColumnString() {
return "add";
}
@@ -143,50 +272,39 @@ public class CUBRIDDialect extends Dialect {
return "drop serial " + sequenceName;
}
+ public String getDropForeignKeyString() {
+ return " drop foreign key ";
+ }
+
+ public boolean qualifyIndexName() {
+ return false;
+ }
+
public boolean supportsSequences() {
return true;
}
+ public boolean supportsExistsInSelect() {
+ return false;
+ }
+
public String getQuerySequencesString() {
return "select name from db_serial";
}
- public boolean dropConstraints() {
- return false;
- }
-
- public boolean supportsLimit() {
- return true;
- }
-
- public String getLimitString(String sql, boolean hasOffset) {
- // CUBRID 8.3.0 support limit
- return new StringBuilder( sql.length() + 20 ).append( sql )
- .append( hasOffset ? " limit ?, ?" : " limit ?" ).toString();
- }
-
- public boolean bindLimitParametersInReverseOrder() {
- return true;
- }
-
- public boolean useMaxForLimit() {
- return true;
- }
-
- public boolean forUpdateOfColumns() {
- return true;
- }
-
- public char closeQuote() {
- return ']';
- }
-
+ /**
+ * The character specific to this dialect used to close a quoted identifier.
+ * CUBRID supports square brackets (MSSQL style), backticks (MySQL style),
+ * as well as double quotes (Oracle style).
+ *
+ * @return The dialect's specific open quote character.
+ */
public char openQuote() {
return '[';
}
- public boolean hasAlterTable() {
- return false;
+ public char closeQuote() {
+ return ']';
}
public String getForUpdateString() {
@@ -197,23 +315,31 @@ public class CUBRIDDialect extends Dialect {
return true;
}
- public boolean supportsCommentOn() {
- return false;
- }
-
- public boolean supportsTemporaryTables() {
- return false;
- }
-
public boolean supportsCurrentTimestampSelection() {
return true;
}
public String getCurrentTimestampSelectString() {
- return "select systimestamp from table({1}) as T(X)";
+ return "select now()";
}
public boolean isCurrentTimestampSelectStringCallable() {
return false;
}
+
+ public boolean supportsEmptyInList() {
+ return false;
+ }
+
+ public boolean supportsIfExistsBeforeTableName() {
+ return true;
+ }
+
+ public boolean supportsTupleDistinctCounts() {
+ return false;
+ }
+
+ public LimitHandler buildLimitHandler(String sql, RowSelection selection) {
+ return new CUBRIDLimitHandler( this, sql, selection );
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java
index 04da5a447c..3bfa7a7695 100644
--- a/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java
@@ -159,7 +159,7 @@ public class DB2Dialect extends Dialect {
registerFunction( "substring", new StandardSQLFunction( "substr", StandardBasicTypes.STRING ) );
registerFunction( "bit_length", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "length(?1)*8" ) );
- registerFunction( "trim", new AnsiTrimEmulationFunction() );
+ registerFunction( "trim", new SQLFunctionTemplate( StandardBasicTypes.STRING, "trim(?1 ?2 ?3 ?4)" ) );
registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "", "||", "" ) );
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java
index 288fabf6e5..d2374221f7 100644
--- a/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java
@@ -39,8 +39,6 @@ import java.util.Map;
import java.util.Properties;
import java.util.Set;
-import org.jboss.logging.Logger;
-
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
@@ -83,6 +81,11 @@ import org.hibernate.metamodel.spi.relational.Sequence;
import org.hibernate.metamodel.spi.relational.Table;
import org.hibernate.metamodel.spi.relational.UniqueKey;
import org.hibernate.persister.entity.Lockable;
+import org.hibernate.sql.ANSICaseFragment;
+import org.hibernate.sql.ANSIJoinFragment;
+import org.hibernate.sql.CaseFragment;
+import org.hibernate.sql.ForUpdateFragment;
+import org.hibernate.sql.JoinFragment;
import org.hibernate.tool.schema.internal.StandardAuxiliaryDatabaseObjectExporter;
import org.hibernate.tool.schema.internal.StandardForeignKeyExporter;
import org.hibernate.tool.schema.internal.StandardIndexExporter;
@@ -90,15 +93,10 @@ import org.hibernate.tool.schema.internal.StandardSequenceExporter;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.StandardUniqueKeyExporter;
import org.hibernate.tool.schema.spi.Exporter;
-import org.hibernate.sql.ANSICaseFragment;
-import org.hibernate.sql.ANSIJoinFragment;
-import org.hibernate.sql.CaseFragment;
-import org.hibernate.sql.ForUpdateFragment;
-import org.hibernate.sql.JoinFragment;
import org.hibernate.type.StandardBasicTypes;
-import org.hibernate.type.descriptor.sql.BlobTypeDescriptor;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
+import org.jboss.logging.Logger;
/**
* Represents a dialect of SQL implemented by a particular RDBMS.
@@ -327,6 +325,23 @@ public abstract class Dialect implements ConversionContext {
return getTypeName( code, Column.DEFAULT_LENGTH, Column.DEFAULT_PRECISION, Column.DEFAULT_SCALE );
}
+ public String cast(String value, int jdbcTypeCode, int length, int precision, int scale) {
+ if ( jdbcTypeCode == Types.CHAR ) {
+ return "cast(" + value + " as char(" + length + "))";
+ }
+ else {
+ return "cast(" + value + "as " + getTypeName( jdbcTypeCode, length, precision, scale ) + ")";
+ }
+ }
+
+ public String cast(String value, int jdbcTypeCode, int length) {
+ return cast( value, jdbcTypeCode, length, Column.DEFAULT_PRECISION, Column.DEFAULT_SCALE );
+ }
+
+ public String cast(String value, int jdbcTypeCode, int precision, int scale) {
+ return cast( value, jdbcTypeCode, Column.DEFAULT_LENGTH, precision, scale );
+ }
+
/**
* Subclasses register a type name for the given type code and maximum
* column length. $l in the type name with be replaced by the
@@ -391,10 +406,6 @@ public abstract class Dialect implements ConversionContext {
protected SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) {
SqlTypeDescriptor descriptor;
switch ( sqlCode ) {
- case Types.BLOB: {
- descriptor = useInputStreamToInsertBlob() ? BlobTypeDescriptor.STREAM_BINDING : null;
- break;
- }
case Types.CLOB: {
descriptor = useInputStreamToInsertBlob() ? ClobTypeDescriptor.STREAM_BINDING : null;
break;
@@ -617,7 +628,9 @@ public abstract class Dialect implements ConversionContext {
// function support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
protected void registerFunction(String name, SQLFunction function) {
- sqlFunctions.put( name, function );
+ // HHH-7721: SQLFunctionRegistry expects all lowercase. Enforce,
+ // just in case a user's customer dialect uses mixed cases.
+ sqlFunctions.put( name.toLowerCase(), function );
}
/**
@@ -2419,4 +2432,15 @@ public abstract class Dialect implements ConversionContext {
public int getInExpressionCountLimit() {
return 0;
}
+
+ /**
+ * HHH-4635
+ * Oracle expects all Lob values to be last in inserts and updates.
+ *
+ * @return boolean True of Lob values should be last, false if it
+ * does not matter.
+ */
+ public boolean forceLobAsLastValue() {
+ return false;
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java
index 42338ef7a1..ca64af3dcd 100644
--- a/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java
@@ -27,8 +27,6 @@ import java.io.Serializable;
import java.sql.SQLException;
import java.sql.Types;
-import org.jboss.logging.Logger;
-
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.StaleObjectStateException;
@@ -53,6 +51,7 @@ import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.type.StandardBasicTypes;
+import org.jboss.logging.Logger;
/**
* An SQL dialect compatible with HSQLDB (HyperSQL).
@@ -123,8 +122,8 @@ public class HSQLDialect extends Dialect {
registerColumnType( Types.CLOB, "longvarchar" );
}
else {
- registerColumnType( Types.BLOB, "blob" );
- registerColumnType( Types.CLOB, "clob" );
+ registerColumnType( Types.BLOB, "blob($l)" );
+ registerColumnType( Types.CLOB, "clob($l)" );
}
// aggregate functions
@@ -244,7 +243,12 @@ public class HSQLDialect extends Dialect {
}
public String getForUpdateString() {
- return "";
+ if ( hsqldbVersion >= 20 ) {
+ return " for update";
+ }
+ else {
+ return "";
+ }
}
public boolean supportsUnique() {
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java
index 40a5891f43..5dcbc459dc 100644
--- a/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java
@@ -123,6 +123,7 @@ public class Oracle8iDialect extends Dialect {
registerFunction( "acos", new StandardSQLFunction("acos", StandardBasicTypes.DOUBLE) );
registerFunction( "asin", new StandardSQLFunction("asin", StandardBasicTypes.DOUBLE) );
registerFunction( "atan", new StandardSQLFunction("atan", StandardBasicTypes.DOUBLE) );
+ registerFunction( "bitand", new StandardSQLFunction("bitand") );
registerFunction( "cos", new StandardSQLFunction("cos", StandardBasicTypes.DOUBLE) );
registerFunction( "cosh", new StandardSQLFunction("cosh", StandardBasicTypes.DOUBLE) );
registerFunction( "exp", new StandardSQLFunction("exp", StandardBasicTypes.DOUBLE) );
@@ -570,5 +571,10 @@ public class Oracle8iDialect extends Dialect {
public int getInExpressionCountLimit() {
return PARAM_LIST_SIZE_LIMIT;
}
+
+ @Override
+ public boolean forceLobAsLastValue() {
+ return true;
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java
index a99661ee91..b3d36326d3 100644
--- a/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java
@@ -163,6 +163,10 @@ public class PostgreSQL81Dialect extends Dialect {
SqlTypeDescriptor descriptor;
switch ( sqlCode ) {
case Types.BLOB: {
+ // Force BLOB binding. Otherwise, byte[] fields annotated
+ // with @Lob will attempt to use
+ // BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING. Since the
+ // dialect uses oid for Blobs, byte arrays cannot be used.
descriptor = BlobTypeDescriptor.BLOB_BINDING;
break;
}
@@ -462,4 +466,14 @@ public class PostgreSQL81Dialect extends Dialect {
public boolean supportsRowValueConstructorSyntax() {
return true;
}
+
+ @Override
+ public String getForUpdateNowaitString() {
+ return getForUpdateString() + " nowait ";
+ }
+
+ @Override
+ public String getForUpdateNowaitString(String aliases) {
+ return getForUpdateString(aliases) + " nowait ";
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java
index 13f333933c..4064b2347a 100644
--- a/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java
@@ -23,6 +23,11 @@
*/
package org.hibernate.dialect;
+import java.sql.Types;
+
+import org.hibernate.type.descriptor.sql.BlobTypeDescriptor;
+import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
+
/**
* All Sybase dialects share an IN list size limit.
@@ -40,4 +45,9 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
public int getInExpressionCountLimit() {
return PARAM_LIST_SIZE_LIMIT;
}
+
+ @Override
+ protected SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) {
+ return sqlCode == Types.BLOB ? BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING : super.getSqlTypeDescriptorOverride( sqlCode );
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java b/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java
index e013421538..9eec303f82 100644
--- a/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java
@@ -38,7 +38,6 @@ public class SQLFunctionRegistry {
}
public SQLFunction findSQLFunction(String functionName) {
- // TODO: lower casing done here. Was done "at random" before; maybe not needed at all ?
String name = functionName.toLowerCase();
SQLFunction userFunction = userFunctions.get( name );
return userFunction != null
@@ -47,7 +46,6 @@ public class SQLFunctionRegistry {
}
public boolean hasFunction(String functionName) {
- // TODO: toLowerCase was not done before. Only used in Template.
String name = functionName.toLowerCase();
return userFunctions.containsKey( name ) || dialect.getFunctions().containsKey( name );
}
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/pagination/CUBRIDLimitHandler.java b/hibernate-core/src/main/java/org/hibernate/dialect/pagination/CUBRIDLimitHandler.java
new file mode 100644
index 0000000000..4ee34f42ae
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/dialect/pagination/CUBRIDLimitHandler.java
@@ -0,0 +1,37 @@
+package org.hibernate.dialect.pagination;
+
+import org.hibernate.dialect.Dialect;
+import org.hibernate.engine.spi.RowSelection;
+
+/**
+ * Limit handler that delegates all operations to the underlying dialect.
+ *
+ * @author Esen Sagynov (kadishmal at gmail dot com)
+ */
+public class CUBRIDLimitHandler extends AbstractLimitHandler {
+ private final Dialect dialect;
+
+ public CUBRIDLimitHandler(Dialect dialect, String sql, RowSelection selection) {
+ super( sql, selection );
+ this.dialect = dialect;
+ }
+
+ public boolean supportsLimit() {
+ return true;
+ }
+
+ public String getProcessedSql() {
+ if (LimitHelper.useLimit(this, selection)) {
+ // useLimitOffset: whether "offset" is set or not;
+ // if set, use "LIMIT offset, row_count" syntax;
+ // if not, use "LIMIT row_count"
+ boolean useLimitOffset = LimitHelper.hasFirstRow(selection);
+
+ return new StringBuilder(sql.length() + 20).append(sql)
+ .append(useLimitOffset ? " limit ?, ?" : " limit ?").toString();
+ }
+ else {
+ return sql; // or return unaltered SQL
+ }
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java
index ba4bb21c66..c9463b5659 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java
@@ -252,9 +252,12 @@ public final class Cascade {
loadedValue = null;
}
if ( loadedValue != null ) {
- final String entityName = entry.getPersister().getEntityName();
+ final EntityEntry valueEntry = eventSource
+ .getPersistenceContext().getEntry(
+ loadedValue );
+ final String entityName = valueEntry.getPersister().getEntityName();
if ( LOG.isTraceEnabled() ) {
- final Serializable id = entry.getPersister().getIdentifier( loadedValue, eventSource );
+ final Serializable id = valueEntry.getPersister().getIdentifier( loadedValue, eventSource );
final String description = MessageHelper.infoString( entityName, id );
LOG.tracev( "Deleting orphaned entity instance: {0}", description );
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java
index 16311723da..2c6a840ba3 100755
--- a/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java
@@ -25,8 +25,6 @@ package org.hibernate.engine.internal;
import java.io.Serializable;
-import org.jboss.logging.Logger;
-
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.collection.spi.PersistentCollection;
@@ -41,6 +39,7 @@ import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.CollectionType;
+import org.jboss.logging.Logger;
/**
* Implements book-keeping for the collection persistence by reachability algorithm
@@ -76,10 +75,8 @@ public final class Collections {
if ( LOG.isDebugEnabled() && loadedPersister != null ) {
LOG.debugf(
"Collection dereferenced: %s",
- MessageHelper.collectionInfoString(
- loadedPersister,
- entry.getLoadedKey(),
- session.getFactory()
+ MessageHelper.collectionInfoString( loadedPersister,
+ coll, entry.getLoadedKey(), session
)
);
}
@@ -135,7 +132,9 @@ public final class Collections {
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Found collection with unloaded owner: %s",
- MessageHelper.collectionInfoString( entry.getLoadedPersister(), entry.getLoadedKey(), session.getFactory() ) );
+ MessageHelper.collectionInfoString(
+ entry.getLoadedPersister(), coll,
+ entry.getLoadedKey(), session ) );
}
entry.setCurrentPersister( entry.getLoadedPersister() );
@@ -189,13 +188,13 @@ public final class Collections {
if (LOG.isDebugEnabled()) {
if (collection.wasInitialized()) LOG.debugf("Collection found: %s, was: %s (initialized)",
- MessageHelper.collectionInfoString(persister, ce.getCurrentKey(), factory),
- MessageHelper.collectionInfoString(ce.getLoadedPersister(),
+ MessageHelper.collectionInfoString(persister, collection, ce.getCurrentKey(), session),
+ MessageHelper.collectionInfoString(ce.getLoadedPersister(), collection,
ce.getLoadedKey(),
- factory));
+ session));
else LOG.debugf("Collection found: %s, was: %s (uninitialized)",
- MessageHelper.collectionInfoString(persister, ce.getCurrentKey(), factory),
- MessageHelper.collectionInfoString(ce.getLoadedPersister(), ce.getLoadedKey(), factory));
+ MessageHelper.collectionInfoString(persister, collection, ce.getCurrentKey(), session),
+ MessageHelper.collectionInfoString(ce.getLoadedPersister(), collection, ce.getLoadedKey(), session));
}
prepareCollectionForUpdate( collection, ce, factory );
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java
index 55df82ce3b..d195654a15 100755
--- a/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java
@@ -71,31 +71,46 @@ public final class JoinHelper {
* be used in the join
*/
public static String[] getAliasedLHSColumnNames(
- AssociationType type,
- String alias,
- int property,
+ AssociationType associationType,
+ String columnQualifier,
+ int propertyIndex,
int begin,
OuterJoinLoadable lhsPersister,
- Mapping mapping
- ) {
- if ( type.useLHSPrimaryKey() ) {
- return StringHelper.qualify( alias, lhsPersister.getIdentifierColumnNames() );
+ Mapping mapping) {
+ if ( associationType.useLHSPrimaryKey() ) {
+ return StringHelper.qualify( columnQualifier, lhsPersister.getIdentifierColumnNames() );
}
else {
- String propertyName = type.getLHSPropertyName();
- if (propertyName==null) {
- return ArrayHelper.slice(
- lhsPersister.toColumns(alias, property),
- begin,
- type.getColumnSpan(mapping)
- );
+ String propertyName = associationType.getLHSPropertyName();
+ if ( propertyName == null ) {
+ return ArrayHelper.slice(
+ toColumns( lhsPersister, columnQualifier, propertyIndex ),
+ begin,
+ associationType.getColumnSpan( mapping )
+ );
}
else {
- return ( (PropertyMapping) lhsPersister ).toColumns(alias, propertyName); //bad cast
+ return ( (PropertyMapping) lhsPersister ).toColumns(columnQualifier, propertyName); //bad cast
}
}
}
-
+
+ private static String[] toColumns(OuterJoinLoadable persister, String columnQualifier, int propertyIndex) {
+ if ( propertyIndex >= 0 ) {
+ return persister.toColumns( columnQualifier, propertyIndex );
+ }
+ else {
+ final String[] cols = persister.getIdentifierColumnNames();
+ final String[] result = new String[cols.length];
+
+ for ( int j = 0; j < cols.length; j++ ) {
+ result[j] = StringHelper.qualify( columnQualifier, cols[j] );
+ }
+
+ return result;
+ }
+ }
+
/**
* Get the columns of the owning entity which are to
* be used in the join
@@ -116,8 +131,10 @@ public final class JoinHelper {
if (propertyName==null) {
//slice, to get the columns for this component
//property
- return ArrayHelper.slice(
- lhsPersister.getSubclassPropertyColumnNames(property),
+ return ArrayHelper.slice(
+ property < 0
+ ? lhsPersister.getIdentifierColumnNames()
+ : lhsPersister.getSubclassPropertyColumnNames(property),
begin,
type.getColumnSpan(mapping)
);
@@ -131,11 +148,10 @@ public final class JoinHelper {
}
public static String getLHSTableName(
- AssociationType type,
- int property,
- OuterJoinLoadable lhsPersister
- ) {
- if ( type.useLHSPrimaryKey() ) {
+ AssociationType type,
+ int propertyIndex,
+ OuterJoinLoadable lhsPersister) {
+ if ( type.useLHSPrimaryKey() || propertyIndex < 0 ) {
return lhsPersister.getTableName();
}
else {
@@ -144,7 +160,7 @@ public final class JoinHelper {
//if there is no property-ref, assume the join
//is to the subclass table (ie. the table of the
//subclass that the association belongs to)
- return lhsPersister.getSubclassPropertyTableName(property);
+ return lhsPersister.getSubclassPropertyTableName(propertyIndex);
}
else {
//handle a property-ref
@@ -157,7 +173,7 @@ public final class JoinHelper {
//assumes that the property-ref refers to a property of the subclass
//table that the association belongs to (a reasonable guess)
//TODO: fix this, add: OuterJoinLoadable.getSubclassPropertyTableName(String propertyName)
- propertyRefTable = lhsPersister.getSubclassPropertyTableName(property);
+ propertyRefTable = lhsPersister.getSubclassPropertyTableName(propertyIndex);
}
return propertyRefTable;
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java
index 6b7185df56..0af6499f14 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java
@@ -93,6 +93,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, StatefulPersistenceContext.class.getName() );
+ private static final boolean tracing = LOG.isTraceEnabled();
+
public static final Object NO_ROW = new MarkerObject( "NO_ROW" );
private static final int INIT_COLL_SIZE = 8;
@@ -893,6 +895,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
public void addUninitializedCollection(CollectionPersister persister, PersistentCollection collection, Serializable id) {
CollectionEntry ce = new CollectionEntry(collection, persister, id, flushing);
addCollection(collection, ce, id);
+ if ( persister.getBatchSize() > 1 ) {
+ getBatchFetchQueue().addBatchLoadableCollection( collection, ce );
+ }
}
/**
@@ -902,6 +907,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
public void addUninitializedDetachedCollection(CollectionPersister persister, PersistentCollection collection) {
CollectionEntry ce = new CollectionEntry( persister, collection.getKey() );
addCollection( collection, ce, collection.getKey() );
+ if ( persister.getBatchSize() > 1 ) {
+ getBatchFetchQueue().addBatchLoadableCollection( collection, ce );
+ }
}
/**
@@ -1003,7 +1011,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override
public void initializeNonLazyCollections() throws HibernateException {
if ( loadCounter == 0 ) {
- LOG.debug( "Initializing non-lazy collections" );
+ if (tracing)
+ LOG.trace( "Initializing non-lazy collections" );
+
//do this work only at the very highest level of the load
loadCounter++; //don't let this method be called recursively
try {
@@ -1861,14 +1871,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
CachedNaturalIdValueSource source) {
final NaturalIdRegionAccessStrategy naturalIdCacheAccessStrategy = persister.getNaturalIdCacheAccessStrategy();
final NaturalIdCacheKey naturalIdCacheKey = new NaturalIdCacheKey( naturalIdValues, persister, session );
- if (naturalIdCacheAccessStrategy.get(naturalIdCacheKey, session.getTimestamp()) != null) {
- return; // prevent identical re-cachings
- }
final SessionFactoryImplementor factory = session.getFactory();
switch ( source ) {
case LOAD: {
+ if (naturalIdCacheAccessStrategy.get(naturalIdCacheKey, session.getTimestamp()) != null) {
+ return; // prevent identical re-cachings
+ }
final boolean put = naturalIdCacheAccessStrategy.putFromLoad(
naturalIdCacheKey,
id,
@@ -1915,6 +1925,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
case UPDATE: {
final NaturalIdCacheKey previousCacheKey = new NaturalIdCacheKey( previousNaturalIdValues, persister, session );
+ if (naturalIdCacheKey.equals(previousCacheKey)) {
+ return; // prevent identical re-caching, solves HHH-7309
+ }
final SoftLock removalLock = naturalIdCacheAccessStrategy.lockItem( previousCacheKey, null );
naturalIdCacheAccessStrategy.remove( previousCacheKey );
@@ -2078,6 +2091,15 @@ public class StatefulPersistenceContext implements PersistenceContext {
public void cleanupFromSynchronizations() {
naturalIdXrefDelegate.unStashInvalidNaturalIdReferences();
}
+
+ @Override
+ public void handleEviction(Object object, EntityPersister persister, Serializable identifier) {
+ naturalIdXrefDelegate.removeNaturalIdCrossReference(
+ persister,
+ identifier,
+ findCachedNaturalId( persister, identifier )
+ );
+ }
};
@Override
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java
index a434f584ff..abfe8851ce 100755
--- a/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java
@@ -281,19 +281,6 @@ public final class TwoPhaseLoad {
session
);
- if ( session.isEventSource() ) {
- postLoadEvent.setEntity( entity ).setId( id ).setPersister( persister );
-
- final EventListenerGroup listenerGroup = session
- .getFactory()
- .getServiceRegistry()
- .getService( EventListenerRegistry.class )
- .getEventListenerGroup( EventType.POST_LOAD );
- for ( PostLoadEventListener listener : listenerGroup.listeners() ) {
- listener.onPostLoad( postLoadEvent );
- }
- }
-
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Done materializing entity %s",
@@ -305,6 +292,45 @@ public final class TwoPhaseLoad {
factory.getStatisticsImplementor().loadEntity( persister.getEntityName() );
}
}
+
+	/**
+	 * PostLoad cannot occur during initializeEntity, as that call occurs *before*
+	 * the Set collections are added to the persistence context by Loader.
+	 * Without the split, LazyInitializationExceptions can occur in the Entity's
+	 * postLoad if it acts upon the collection.
+	 *
+	 * HHH-6043
+	 *
+	 * @param entity The entity for which to fire the post-load event
+	 * @param session The session from which the load originated
+	 * @param postLoadEvent The (re-used) post-load event instance to populate
+	 *        and dispatch to the registered POST_LOAD listeners
+	 */
+ public static void postLoad(
+ final Object entity,
+ final SessionImplementor session,
+ final PostLoadEvent postLoadEvent) {
+
+ if ( session.isEventSource() ) {
+ final PersistenceContext persistenceContext
+ = session.getPersistenceContext();
+ final EntityEntry entityEntry = persistenceContext.getEntry(entity);
+ final Serializable id = entityEntry.getId();
+
+ postLoadEvent.setEntity( entity ).setId( entityEntry.getId() )
+ .setPersister( entityEntry.getPersister() );
+
+ final EventListenerGroup listenerGroup
+ = session
+ .getFactory()
+ .getServiceRegistry()
+ .getService( EventListenerRegistry.class )
+ .getEventListenerGroup( EventType.POST_LOAD );
+ for ( PostLoadEventListener listener : listenerGroup.listeners() ) {
+ listener.onPostLoad( postLoadEvent );
+ }
+ }
+ }
private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) {
return ( session.getFactory().getServiceRegistry().getService( RegionFactory.class ).isMinimalPutsEnabled() &&
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java
index d4e463ffa7..2150d73c62 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java
@@ -31,16 +31,12 @@ import java.sql.Clob;
* @author Steve Ebersole
*/
public abstract class AbstractLobCreator implements LobCreator {
- /**
- * {@inheritDoc}
- */
+ @Override
public Blob wrap(Blob blob) {
return SerializableBlobProxy.generateProxy( blob );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Clob wrap(Clob clob) {
if ( SerializableNClobProxy.isNClob( clob ) ) {
return SerializableNClobProxy.generateProxy( clob );
diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/BinaryStream.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BinaryStream.java
similarity index 91%
rename from hibernate-core/src/main/java/org/hibernate/type/descriptor/BinaryStream.java
rename to hibernate-core/src/main/java/org/hibernate/engine/jdbc/BinaryStream.java
index 24ef16ce94..1e15d37fe3 100644
--- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/BinaryStream.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BinaryStream.java
@@ -21,7 +21,8 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
-package org.hibernate.type.descriptor;
+package org.hibernate.engine.jdbc;
+
import java.io.InputStream;
/**
@@ -49,5 +50,10 @@ public interface BinaryStream {
*
* @return The input stream length
*/
- public int getLength();
+ public long getLength();
+
+ /**
+ * Release any underlying resources.
+ */
+ public void release();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java
index 2f10a3f8aa..e5a7983085 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java
@@ -23,11 +23,16 @@
*/
package org.hibernate.engine.jdbc;
-
/**
* Marker interface for non-contextually created {@link java.sql.Blob} instances..
*
* @author Steve Ebersole
*/
public interface BlobImplementer {
+ /**
+ * Gets access to the data underlying this BLOB.
+ *
+ * @return Access to the underlying data.
+ */
+ public BinaryStream getUnderlyingStream();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java
index 4be9234a83..0bc01a7eee 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationHandler;
@@ -30,12 +31,12 @@ import java.lang.reflect.Proxy;
import java.sql.Blob;
import java.sql.SQLException;
-import org.hibernate.type.descriptor.java.BinaryStreamImpl;
+import org.hibernate.engine.jdbc.internal.BinaryStreamImpl;
import org.hibernate.type.descriptor.java.DataHelper;
/**
- * Manages aspects of proxying {@link Blob Blobs} for non-contextual creation, including proxy creation and
- * handling proxy invocations.
+ * Manages aspects of proxying {@link Blob} references for non-contextual creation, including proxy creation and
+ * handling proxy invocations. We use proxies here solely to avoid JDBC version incompatibilities.
*
* @author Gavin King
* @author Steve Ebersole
@@ -44,8 +45,7 @@ import org.hibernate.type.descriptor.java.DataHelper;
public class BlobProxy implements InvocationHandler {
private static final Class[] PROXY_INTERFACES = new Class[] { Blob.class, BlobImplementer.class };
- private InputStream stream;
- private long length;
+ private BinaryStream binaryStream;
private boolean needsReset = false;
/**
@@ -55,8 +55,7 @@ public class BlobProxy implements InvocationHandler {
* @see #generateProxy(byte[])
*/
private BlobProxy(byte[] bytes) {
- this.stream = new BinaryStreamImpl( bytes );
- this.length = bytes.length;
+ binaryStream = new BinaryStreamImpl( bytes );
}
/**
@@ -67,17 +66,17 @@ public class BlobProxy implements InvocationHandler {
* @see #generateProxy(java.io.InputStream, long)
*/
private BlobProxy(InputStream stream, long length) {
- this.stream = stream;
- this.length = length;
+ this.binaryStream = new StreamBackedBinaryStream( stream, length );
}
private long getLength() {
- return length;
+ return binaryStream.getLength();
}
private InputStream getStream() throws SQLException {
+ InputStream stream = binaryStream.getInputStream();
try {
- if (needsReset) {
+ if ( needsReset ) {
stream.reset();
}
}
@@ -94,6 +93,7 @@ public class BlobProxy implements InvocationHandler {
* @throws UnsupportedOperationException if any methods other than {@link Blob#length()}
* or {@link Blob#getBinaryStream} are invoked.
*/
+ @Override
@SuppressWarnings({ "UnnecessaryBoxing" })
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
final String methodName = method.getName();
@@ -102,6 +102,9 @@ public class BlobProxy implements InvocationHandler {
if ( "length".equals( methodName ) && argCount == 0 ) {
return Long.valueOf( getLength() );
}
+ if ( "getUnderlyingStream".equals( methodName ) ) {
+ return binaryStream;
+ }
if ( "getBinaryStream".equals( methodName ) ) {
if ( argCount == 0 ) {
return getStream();
@@ -137,7 +140,7 @@ public class BlobProxy implements InvocationHandler {
}
}
if ( "free".equals( methodName ) && argCount == 0 ) {
- stream.close();
+ binaryStream.release();
return null;
}
if ( "toString".equals( methodName ) && argCount == 0 ) {
@@ -197,4 +200,43 @@ public class BlobProxy implements InvocationHandler {
}
return cl;
}
+
+ private static class StreamBackedBinaryStream implements BinaryStream {
+ private final InputStream stream;
+ private final long length;
+
+ private byte[] bytes;
+
+ private StreamBackedBinaryStream(InputStream stream, long length) {
+ this.stream = stream;
+ this.length = length;
+ }
+
+ @Override
+ public InputStream getInputStream() {
+ return stream;
+ }
+
+ @Override
+ public byte[] getBytes() {
+ if ( bytes == null ) {
+ bytes = DataHelper.extractBytes( stream );
+ }
+ return bytes;
+ }
+
+ @Override
+ public long getLength() {
+			return length;
+ }
+
+ @Override
+ public void release() {
+ try {
+ stream.close();
+ }
+ catch (IOException ignore) {
+ }
+ }
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/CharacterStream.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/CharacterStream.java
similarity index 76%
rename from hibernate-core/src/main/java/org/hibernate/type/descriptor/CharacterStream.java
rename to hibernate-core/src/main/java/org/hibernate/engine/jdbc/CharacterStream.java
index 4165b069c4..f90d6c3356 100644
--- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/CharacterStream.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/CharacterStream.java
@@ -21,7 +21,9 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
-package org.hibernate.type.descriptor;
+package org.hibernate.engine.jdbc;
+
+import java.io.InputStream;
import java.io.Reader;
/**
@@ -32,17 +34,28 @@ import java.io.Reader;
*/
public interface CharacterStream {
/**
- * Retrieve the reader.
+ * Provides access to the underlying data as a Reader.
*
* @return The reader.
*/
- public Reader getReader();
+ public Reader asReader();
/**
- * Retrieve the number of characters. JDBC 3 and earlier defined the length in terms of int type rather than
- * long type :(
+ * Provides access to the underlying data as a String.
+ *
+ * @return The underlying String data
+ */
+ public String asString();
+
+ /**
+ * Retrieve the number of characters.
*
* @return The number of characters.
*/
- public int getLength();
+ public long getLength();
+
+ /**
+ * Release any underlying resources.
+ */
+ public void release();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java
index 4beed7a499..5fb4df2a7d 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java
@@ -23,11 +23,16 @@
*/
package org.hibernate.engine.jdbc;
-
/**
* Marker interface for non-contextually created {@link java.sql.Clob} instances..
*
* @author Steve Ebersole
*/
public interface ClobImplementer {
+ /**
+ * Gets access to the data underlying this CLOB.
+ *
+ * @return Access to the underlying data.
+ */
+ public CharacterStream getUnderlyingStream();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java
index 8a5c88b26d..0ddc9455d3 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java
@@ -33,11 +33,12 @@ import java.lang.reflect.Proxy;
import java.sql.Clob;
import java.sql.SQLException;
+import org.hibernate.engine.jdbc.internal.CharacterStreamImpl;
import org.hibernate.type.descriptor.java.DataHelper;
/**
* Manages aspects of proxying {@link Clob Clobs} for non-contextual creation, including proxy creation and
- * handling proxy invocations.
+ * handling proxy invocations. We use proxies here solely to avoid JDBC version incompatibilities.
*
* @author Gavin King
* @author Steve Ebersole
@@ -46,12 +47,9 @@ import org.hibernate.type.descriptor.java.DataHelper;
public class ClobProxy implements InvocationHandler {
private static final Class[] PROXY_INTERFACES = new Class[] { Clob.class, ClobImplementer.class };
- private String string;
- private Reader reader;
- private long length;
+ private final CharacterStream characterStream;
private boolean needsReset = false;
-
/**
* Constructor used to build {@link Clob} from string data.
*
@@ -59,9 +57,7 @@ public class ClobProxy implements InvocationHandler {
* @see #generateProxy(String)
*/
protected ClobProxy(String string) {
- this.string = string;
- reader = new StringReader(string);
- length = string.length();
+ this.characterStream = new CharacterStreamImpl( string );
}
/**
@@ -72,28 +68,25 @@ public class ClobProxy implements InvocationHandler {
* @see #generateProxy(java.io.Reader, long)
*/
protected ClobProxy(Reader reader, long length) {
- this.reader = reader;
- this.length = length;
+ this.characterStream = new CharacterStreamImpl( reader, length );
}
protected long getLength() {
- return length;
+ return characterStream.getLength();
}
protected InputStream getAsciiStream() throws SQLException {
resetIfNeeded();
- return new ReaderInputStream( reader );
+ return new ReaderInputStream( characterStream.asReader() );
}
protected Reader getCharacterStream() throws SQLException {
resetIfNeeded();
- return reader;
+ return characterStream.asReader();
}
protected String getSubString(long start, int length) {
- if ( string == null ) {
- throw new UnsupportedOperationException( "Clob was not created from string; cannot substring" );
- }
+ final String string = characterStream.asString();
// semi-naive implementation
int endIndex = Math.min( ((int)start)+length, string.length() );
return string.substring( (int)start, endIndex );
@@ -105,6 +98,7 @@ public class ClobProxy implements InvocationHandler {
* @throws UnsupportedOperationException if any methods other than {@link Clob#length()},
* {@link Clob#getAsciiStream()}, or {@link Clob#getCharacterStream()} are invoked.
*/
+ @Override
@SuppressWarnings({ "UnnecessaryBoxing" })
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
final String methodName = method.getName();
@@ -113,6 +107,9 @@ public class ClobProxy implements InvocationHandler {
if ( "length".equals( methodName ) && argCount == 0 ) {
return Long.valueOf( getLength() );
}
+ if ( "getUnderlyingStream".equals( methodName ) ) {
+ return characterStream;
+ }
if ( "getAsciiStream".equals( methodName ) && argCount == 0 ) {
return getAsciiStream();
}
@@ -152,7 +149,7 @@ public class ClobProxy implements InvocationHandler {
return getSubString( start-1, length );
}
if ( "free".equals( methodName ) && argCount == 0 ) {
- reader.close();
+ characterStream.release();
return null;
}
if ( "toString".equals( methodName ) && argCount == 0 ) {
@@ -171,7 +168,7 @@ public class ClobProxy implements InvocationHandler {
protected void resetIfNeeded() throws SQLException {
try {
if ( needsReset ) {
- reader.reset();
+ characterStream.asReader().reset();
}
}
catch ( IOException ioe ) {
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java
index ae27d712f5..3ed10eae56 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java
@@ -59,9 +59,7 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat
return lobCreationContext.execute( CREATE_BLOB_CALLBACK );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Blob createBlob(byte[] bytes) {
try {
Blob blob = createBlob();
@@ -73,25 +71,11 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat
}
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Blob createBlob(InputStream inputStream, long length) {
- try {
- Blob blob = createBlob();
- OutputStream byteStream = blob.setBinaryStream( 1 );
- StreamUtils.copy( inputStream, byteStream );
- byteStream.flush();
- byteStream.close();
- // todo : validate length written versus length given?
- return blob;
- }
- catch ( SQLException e ) {
- throw new JDBCException( "Unable to prepare BLOB binary stream for writing",e );
- }
- catch ( IOException e ) {
- throw new HibernateException( "Unable to write stream contents to BLOB", e );
- }
+ // IMPL NOTE : it is inefficient to use JDBC LOB locator creation to create a LOB
+ // backed by a given stream. So just wrap the stream (which is what the NonContextualLobCreator does).
+ return NonContextualLobCreator.INSTANCE.createBlob( inputStream, length );
}
/**
@@ -103,9 +87,7 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat
return lobCreationContext.execute( CREATE_CLOB_CALLBACK );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Clob createClob(String string) {
try {
Clob clob = createClob();
@@ -117,24 +99,11 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat
}
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Clob createClob(Reader reader, long length) {
- try {
- Clob clob = createClob();
- Writer writer = clob.setCharacterStream( 1 );
- StreamUtils.copy( reader, writer );
- writer.flush();
- writer.close();
- return clob;
- }
- catch ( SQLException e ) {
- throw new JDBCException( "Unable to prepare CLOB stream for writing", e );
- }
- catch ( IOException e ) {
- throw new HibernateException( "Unable to write CLOB stream content", e );
- }
+ // IMPL NOTE : it is inefficient to use JDBC LOB locator creation to create a LOB
+ // backed by a given stream. So just wrap the stream (which is what the NonContextualLobCreator does).
+ return NonContextualLobCreator.INSTANCE.createClob( reader, length );
}
/**
@@ -146,9 +115,7 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat
return lobCreationContext.execute( CREATE_NCLOB_CALLBACK );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public NClob createNClob(String string) {
try {
NClob nclob = createNClob();
@@ -160,24 +127,11 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat
}
}
- /**
- * {@inheritDoc}
- */
+ @Override
public NClob createNClob(Reader reader, long length) {
- try {
- NClob nclob = createNClob();
- Writer writer = nclob.setCharacterStream( 1 );
- StreamUtils.copy( reader, writer );
- writer.flush();
- writer.close();
- return nclob;
- }
- catch ( SQLException e ) {
- throw new JDBCException( "Unable to prepare NCLOB stream for writing", e );
- }
- catch ( IOException e ) {
- throw new HibernateException( "Unable to write NCLOB stream content", e );
- }
+ // IMPL NOTE : it is inefficient to use JDBC LOB locator creation to create a LOB
+ // backed by a given stream. So just wrap the stream (which is what the NonContextualLobCreator does).
+ return NonContextualLobCreator.INSTANCE.createNClob( reader, length );
}
public static final LobCreationContext.Callback CREATE_BLOB_CALLBACK = new LobCreationContext.Callback() {
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java
index 74a8d72c7a..b1f6d39628 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java
@@ -30,8 +30,6 @@ import java.sql.NClob;
/**
* Contract for creating various LOB references.
- *
- * @todo LobCreator really needs to be an api since we expose it to users.
*
* @author Steve Ebersole
* @author Gail Badner
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java
index cbd7ac3a15..715f57e767 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.io.Reader;
import java.lang.reflect.Proxy;
import java.sql.Clob;
@@ -29,10 +30,10 @@ import java.sql.NClob;
/**
* Manages aspects of proxying java.sql.NClobs for non-contextual creation, including proxy creation and
- * handling proxy invocations.
+ * handling proxy invocations. We use proxies here solely to avoid JDBC version incompatibilities.
*
- * Generated proxies are typed as {@link java.sql.Clob} (java.sql.NClob extends {@link java.sql.Clob}) and in JDK 1.6 environments, they
- * are also typed to java.sql.NClob
+ * Generated proxies are typed as {@link java.sql.Clob} (java.sql.NClob extends {@link java.sql.Clob})
+ * and in JDK 1.6+ environments, they are also typed to java.sql.NClob
*
* @author Steve Ebersole
*/
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java
index 8b370e48a6..93b5ec9db0 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.io.InputStream;
import java.io.Reader;
import java.sql.Blob;
@@ -41,44 +42,32 @@ public class NonContextualLobCreator extends AbstractLobCreator implements LobCr
private NonContextualLobCreator() {
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Blob createBlob(byte[] bytes) {
return BlobProxy.generateProxy( bytes );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Blob createBlob(InputStream stream, long length) {
return BlobProxy.generateProxy( stream, length );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Clob createClob(String string) {
return ClobProxy.generateProxy( string );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Clob createClob(Reader reader, long length) {
return ClobProxy.generateProxy( reader, length );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public NClob createNClob(String string) {
return NClobProxy.generateProxy( string );
}
- /**
- * {@inheritDoc}
- */
+ @Override
public NClob createNClob(Reader reader, long length) {
return NClobProxy.generateProxy( reader, length );
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java
index 7728bd4bae..4c2fb9e248 100755
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.engine.jdbc;
import java.io.IOException;
@@ -42,5 +41,4 @@ public class ReaderInputStream extends InputStream {
public int read() throws IOException {
return reader.read();
}
-
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java
index 24d4b9f15b..c7f16de4a7 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.io.Serializable;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
@@ -62,9 +63,7 @@ public class SerializableBlobProxy implements InvocationHandler, Serializable {
}
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
if ( "getWrappedBlob".equals( method.getName() ) ) {
return getWrappedBlob();
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java
index 1d53e6bbd4..a092ff710a 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.io.Serializable;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
@@ -62,9 +63,7 @@ public class SerializableClobProxy implements InvocationHandler, Serializable {
}
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
if ( "getWrappedClob".equals( method.getName() ) ) {
return getWrappedClob();
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java
index 954bb80d64..2314f92d3a 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.lang.reflect.Proxy;
import java.sql.Clob;
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java
index dad827d309..205030b03e 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java
index 7b3e282728..0885e46697 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.sql.Blob;
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java
index 8dfb72f78d..97f621d78b 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc;
+
import java.sql.Clob;
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java
index 3b9d67da7b..6fd2f82c20 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java
@@ -29,6 +29,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Map;
import java.util.Properties;
+import java.util.concurrent.atomic.AtomicInteger;
import org.jboss.logging.Logger;
@@ -68,7 +69,7 @@ public class DriverManagerConnectionProviderImpl
private boolean autocommit;
private final ArrayList pool = new ArrayList();
- private int checkedOut = 0;
+ private final AtomicInteger checkedOut = new AtomicInteger();
private boolean stopped;
@@ -93,7 +94,7 @@ public class DriverManagerConnectionProviderImpl
}
public void configure(Map configurationValues) {
- LOG.usingHibernateBuiltInConnectionPool();
+ LOG.usingHibernateBuiltInConnectionPool();
String driverClassName = (String) configurationValues.get( AvailableSettings.DRIVER );
if ( driverClassName == null ) {
@@ -127,18 +128,19 @@ public class DriverManagerConnectionProviderImpl
}
poolSize = ConfigurationHelper.getInt( AvailableSettings.POOL_SIZE, configurationValues, 20 ); // default pool size 20
- LOG.hibernateConnectionPoolSize(poolSize);
+ LOG.hibernateConnectionPoolSize( poolSize );
autocommit = ConfigurationHelper.getBoolean( AvailableSettings.AUTOCOMMIT, configurationValues );
- LOG.autoCommitMode( autocommit );
+ LOG.autoCommitMode( autocommit );
isolation = ConfigurationHelper.getInteger( AvailableSettings.ISOLATION, configurationValues );
- if (isolation != null) LOG.jdbcIsolationLevel(Environment.isolationLevelToString(isolation.intValue()));
+ if ( isolation != null )
+ LOG.jdbcIsolationLevel( Environment.isolationLevelToString( isolation.intValue() ) );
url = (String) configurationValues.get( AvailableSettings.URL );
if ( url == null ) {
- String msg = LOG.jdbcUrlNotSpecified(AvailableSettings.URL);
- LOG.error(msg);
+ String msg = LOG.jdbcUrlNotSpecified( AvailableSettings.URL );
+ LOG.error( msg );
throw new HibernateException( msg );
}
@@ -168,13 +170,14 @@ public class DriverManagerConnectionProviderImpl
}
public Connection getConnection() throws SQLException {
- LOG.tracev( "Total checked-out connections: {0}", checkedOut );
+ final boolean traceEnabled = LOG.isTraceEnabled();
+ if ( traceEnabled ) LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() );
// essentially, if we have available connections in the pool, use one...
synchronized (pool) {
if ( !pool.isEmpty() ) {
int last = pool.size() - 1;
- LOG.tracev( "Using pooled JDBC connection, pool size: {0}", last );
+ if ( traceEnabled ) LOG.tracev( "Using pooled JDBC connection, pool size: {0}", last );
Connection pooled = pool.remove( last );
if ( isolation != null ) {
pooled.setTransactionIsolation( isolation.intValue() );
@@ -182,14 +185,16 @@ public class DriverManagerConnectionProviderImpl
if ( pooled.getAutoCommit() != autocommit ) {
pooled.setAutoCommit( autocommit );
}
- checkedOut++;
+ checkedOut.incrementAndGet();
return pooled;
}
}
// otherwise we open a new connection...
- LOG.debug( "Opening new JDBC connection" );
+ final boolean debugEnabled = LOG.isDebugEnabled();
+ if ( debugEnabled ) LOG.debug( "Opening new JDBC connection" );
+
Connection conn = DriverManager.getConnection( url, connectionProps );
if ( isolation != null ) {
conn.setTransactionIsolation( isolation.intValue() );
@@ -198,23 +203,24 @@ public class DriverManagerConnectionProviderImpl
conn.setAutoCommit(autocommit);
}
- if ( LOG.isDebugEnabled() ) {
+ if ( debugEnabled ) {
LOG.debugf( "Created connection to: %s, Isolation Level: %s", url, conn.getTransactionIsolation() );
}
- checkedOut++;
+ checkedOut.incrementAndGet();
return conn;
}
public void closeConnection(Connection conn) throws SQLException {
- checkedOut--;
+ checkedOut.decrementAndGet();
+ final boolean traceEnabled = LOG.isTraceEnabled();
// add to the pool if the max size is not yet reached.
- synchronized (pool) {
+ synchronized ( pool ) {
int currentSize = pool.size();
if ( currentSize < poolSize ) {
- LOG.tracev( "Returning connection to pool, pool size: {0}", ( currentSize + 1 ) );
- pool.add(conn);
+ if ( traceEnabled ) LOG.tracev( "Returning connection to pool, pool size: {0}", ( currentSize + 1 ) );
+ pool.add( conn );
return;
}
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java
index f26633f1ea..415d0b5462 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java
@@ -63,7 +63,7 @@ import org.hibernate.internal.CoreMessageLogger;
public class StandardDialectResolver extends AbstractDialectResolver {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
- StandardDialectResolver.class.getName());
+ StandardDialectResolver.class.getName());
@Override
protected Dialect resolveDialectInternal(DatabaseMetaData metaData) throws SQLException {
@@ -88,7 +88,7 @@ public class StandardDialectResolver extends AbstractDialectResolver {
if ( "PostgreSQL".equals( databaseName ) ) {
final int databaseMinorVersion = metaData.getDatabaseMinorVersion();
- if (databaseMajorVersion >= 8 && databaseMinorVersion >= 2) {
+ if ( databaseMajorVersion > 8 || ( databaseMajorVersion == 8 && databaseMinorVersion >= 2 ) ) {
return new PostgreSQL82Dialect();
}
return new PostgreSQL81Dialect();
@@ -111,7 +111,7 @@ public class StandardDialectResolver extends AbstractDialectResolver {
}
if ( "ingres".equalsIgnoreCase( databaseName ) ) {
- switch( databaseMajorVersion ) {
+ switch ( databaseMajorVersion ) {
case 9:
int databaseMinorVersion = metaData.getDatabaseMinorVersion();
if (databaseMinorVersion > 2) {
@@ -133,6 +133,7 @@ public class StandardDialectResolver extends AbstractDialectResolver {
case 9:
return new SQLServer2005Dialect();
case 10:
+ case 11:
return new SQLServer2008Dialect();
default:
LOG.unknownSqlServerVersion(databaseMajorVersion);
diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BinaryStreamImpl.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/BinaryStreamImpl.java
similarity index 86%
rename from hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BinaryStreamImpl.java
rename to hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/BinaryStreamImpl.java
index 078c0b25ad..b687105af4 100644
--- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BinaryStreamImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/BinaryStreamImpl.java
@@ -21,12 +21,13 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
-package org.hibernate.type.descriptor.java;
+package org.hibernate.engine.jdbc.internal;
import java.io.ByteArrayInputStream;
+import java.io.IOException;
import java.io.InputStream;
-import org.hibernate.type.descriptor.BinaryStream;
+import org.hibernate.engine.jdbc.BinaryStream;
/**
* Implementation of {@link BinaryStream}
@@ -50,7 +51,16 @@ public class BinaryStreamImpl extends ByteArrayInputStream implements BinaryStre
return buf;
}
- public int getLength() {
+ public long getLength() {
return length;
}
+
+ @Override
+ public void release() {
+ try {
+ super.close();
+ }
+ catch (IOException ignore) {
+ }
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/CharacterStreamImpl.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/CharacterStreamImpl.java
new file mode 100644
index 0000000000..d61e07a4c5
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/CharacterStreamImpl.java
@@ -0,0 +1,87 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.engine.jdbc.internal;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.io.StringReader;
+
+import org.hibernate.engine.jdbc.CharacterStream;
+import org.hibernate.type.descriptor.java.DataHelper;
+
+/**
+ * Implementation of {@link CharacterStream}
+ *
+ * @author Steve Ebersole
+ */
+public class CharacterStreamImpl implements CharacterStream {
+ private final long length;
+
+ private Reader reader;
+ private String string;
+
+ public CharacterStreamImpl(String chars) {
+ this.string = chars;
+ this.length = chars.length();
+ }
+
+ public CharacterStreamImpl(Reader reader, long length) {
+ this.reader = reader;
+ this.length = length;
+ }
+
+ @Override
+ public Reader asReader() {
+ if ( reader == null ) {
+ reader = new StringReader( string );
+ }
+ return reader;
+ }
+
+ @Override
+ public String asString() {
+ if ( string == null ) {
+ string = DataHelper.extractString( reader );
+ }
+ return string;
+ }
+
+ @Override
+ public long getLength() {
+ return length;
+ }
+
+ @Override
+ public void release() {
+ if ( reader == null ) {
+ return;
+ }
+ try {
+ reader.close();
+ }
+ catch (IOException ignore) {
+ }
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java
index 421a64422c..b1824d7e47 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java
@@ -31,8 +31,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.jboss.logging.Logger;
-
import org.hibernate.CacheMode;
import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
@@ -48,6 +46,7 @@ import org.hibernate.engine.spi.Status;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
+import org.jboss.logging.Logger;
/**
* Represents state associated with the processing of a given {@link ResultSet}
@@ -253,7 +252,12 @@ public class CollectionLoadContext {
}
else {
ce.postInitialize( lce.getCollection() );
+// if (ce.getLoadedPersister().getBatchSize() > 1) { // not the best place for doing this, moved into ce.postInitialize
+// getLoadContext().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(ce);
+// }
}
+
+
boolean addToCache = hasNoQueuedAdds && // there were no queued additions
persister.hasCache() && // and the role has a cache
@@ -266,7 +270,7 @@ public class CollectionLoadContext {
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Collection fully initialized: %s",
- MessageHelper.collectionInfoString(persister, lce.getKey(), session.getFactory())
+ MessageHelper.collectionInfoString(persister, lce.getCollection(), lce.getKey(), session)
);
}
if ( session.getFactory().getStatistics().isStatisticsEnabled() ) {
@@ -285,7 +289,7 @@ public class CollectionLoadContext {
final SessionFactoryImplementor factory = session.getFactory();
if ( LOG.isDebugEnabled() ) {
- LOG.debugf( "Caching collection: %s", MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) );
+ LOG.debugf( "Caching collection: %s", MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session ) );
}
if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
@@ -318,7 +322,7 @@ public class CollectionLoadContext {
if ( collectionOwner == null ) {
throw new HibernateException(
"Unable to resolve owner of loading collection [" +
- MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) +
+ MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session ) +
"] for second level caching"
);
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java
index ba56e22870..5a6e4a6cf2 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java
@@ -201,11 +201,6 @@ public class LoadContexts {
}
return lce.getCollection();
}
- // TODO : should really move this log statement to CollectionType, where this is used from...
- if ( LOG.isTraceEnabled() ) {
- LOG.tracef( "Creating collection wrapper: %s",
- MessageHelper.collectionInfoString( persister, ownerKey, getSession().getFactory() ) );
- }
return null;
}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java
index 0afe801f3d..37d5c8f7d5 100755
--- a/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java
@@ -25,15 +25,17 @@ package org.hibernate.engine.spi;
import java.io.Serializable;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.Map;
+import java.util.Map.Entry;
+
+import org.jboss.logging.Logger;
import org.hibernate.EntityMode;
import org.hibernate.cache.spi.CacheKey;
import org.hibernate.collection.spi.PersistentCollection;
-import org.hibernate.internal.util.MarkerObject;
-import org.hibernate.internal.util.collections.IdentityMap;
+import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
@@ -43,33 +45,35 @@ import org.hibernate.persister.entity.EntityPersister;
* can be re-used as a subquery for loading owned collections.
*
* @author Gavin King
+ * @author Steve Ebersole
+ * @author Guenther Demetz
*/
public class BatchFetchQueue {
+ private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, BatchFetchQueue.class.getName() );
- public static final Object MARKER = new MarkerObject( "MARKER" );
-
- /**
- * Defines a sequence of {@link EntityKey} elements that are currently
- * elegible for batch-fetching.
- *
- * Even though this is a map, we only use the keys. A map was chosen in
- * order to utilize a {@link LinkedHashMap} to maintain sequencing
- * as well as uniqueness.
- *
- * TODO : this would be better as a SequencedReferenceSet, but no such beast exists!
- */
- private final Map batchLoadableEntityKeys = new LinkedHashMap(8);
+ private final PersistenceContext context;
/**
* A map of {@link SubselectFetch subselect-fetch descriptors} keyed by the
* {@link EntityKey) against which the descriptor is registered.
*/
- private final Map subselectsByEntityKey = new HashMap(8);
+ private final Map subselectsByEntityKey = new HashMap(8);
/**
- * The owning persistence context.
+ * Used to hold information about the entities that are currently eligible for batch-fetching. Ultimately
+ * used by {@link #getEntityBatch} to build entity load batches.
+ *
+ * A Map structure is used to segment the keys by entity type since loading can only be done for a particular entity
+ * type at a time.
*/
- private final PersistenceContext context;
+ private final Map > batchLoadableEntityKeys = new HashMap >(8);
+
+ /**
+ * Used to hold information about the collections that are currently eligible for batch-fetching. Ultimately
+ * used by {@link #getCollectionBatch} to build collection load batches.
+ */
+ private final Map> batchLoadableCollections =
+ new HashMap>(8);
/**
* Constructs a queue for the given context.
@@ -85,9 +89,13 @@ public class BatchFetchQueue {
*/
public void clear() {
batchLoadableEntityKeys.clear();
+ batchLoadableCollections.clear();
subselectsByEntityKey.clear();
}
+
+ // sub-select support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
/**
* Retrieve the fetch descriptor associated with the given entity key.
*
@@ -96,7 +104,7 @@ public class BatchFetchQueue {
* this entity key.
*/
public SubselectFetch getSubselect(EntityKey key) {
- return (SubselectFetch) subselectsByEntityKey.get(key);
+ return subselectsByEntityKey.get( key );
}
/**
@@ -106,7 +114,7 @@ public class BatchFetchQueue {
* @param subquery The fetch descriptor.
*/
public void addSubselect(EntityKey key, SubselectFetch subquery) {
- subselectsByEntityKey.put(key, subquery);
+ subselectsByEntityKey.put( key, subquery );
}
/**
@@ -116,7 +124,7 @@ public class BatchFetchQueue {
* need to load its collections)
*/
public void removeSubselect(EntityKey key) {
- subselectsByEntityKey.remove(key);
+ subselectsByEntityKey.remove( key );
}
/**
@@ -128,6 +136,9 @@ public class BatchFetchQueue {
subselectsByEntityKey.clear();
}
+
+ // entity batch support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
/**
* If an EntityKey represents a batch loadable entity, add
* it to the queue.
@@ -140,9 +151,15 @@ public class BatchFetchQueue {
*/
public void addBatchLoadableEntityKey(EntityKey key) {
if ( key.isBatchLoadable() ) {
- batchLoadableEntityKeys.put( key, MARKER );
+ LinkedHashSet set = batchLoadableEntityKeys.get( key.getEntityName());
+ if (set == null) {
+ set = new LinkedHashSet(8);
+ batchLoadableEntityKeys.put( key.getEntityName(), set);
+ }
+ set.add(key);
}
}
+
/**
* After evicting or deleting or loading an entity, we don't
@@ -150,69 +167,12 @@ public class BatchFetchQueue {
* if necessary
*/
public void removeBatchLoadableEntityKey(EntityKey key) {
- if ( key.isBatchLoadable() ) batchLoadableEntityKeys.remove(key);
- }
-
- /**
- * Get a batch of uninitialized collection keys for a given role
- *
- * @param collectionPersister The persister for the collection role.
- * @param id A key that must be included in the batch fetch
- * @param batchSize the maximum number of keys to return
- * @return an array of collection keys, of length batchSize (padded with nulls)
- */
- public Serializable[] getCollectionBatch(
- final CollectionPersister collectionPersister,
- final Serializable id,
- final int batchSize) {
- Serializable[] keys = new Serializable[batchSize];
- keys[0] = id;
- int i = 1;
- //int count = 0;
- int end = -1;
- boolean checkForEnd = false;
- // this only works because collection entries are kept in a sequenced
- // map by persistence context (maybe we should do like entities and
- // keep a separate sequences set...)
-
- for ( Map.Entry me :
- IdentityMap.concurrentEntries( (Map) context.getCollectionEntries() )) {
-
- CollectionEntry ce = me.getValue();
- PersistentCollection collection = me.getKey();
- if ( !collection.wasInitialized() && ce.getLoadedPersister() == collectionPersister ) {
-
- if ( checkForEnd && i == end ) {
- return keys; //the first key found after the given key
- }
-
- //if ( end == -1 && count > batchSize*10 ) return keys; //try out ten batches, max
-
- final boolean isEqual = collectionPersister.getKeyType().isEqual(
- id,
- ce.getLoadedKey(),
- collectionPersister.getFactory()
- );
-
- if ( isEqual ) {
- end = i;
- //checkForEnd = false;
- }
- else if ( !isCached( ce.getLoadedKey(), collectionPersister ) ) {
- keys[i++] = ce.getLoadedKey();
- //count++;
- }
-
- if ( i == batchSize ) {
- i = 1; //end of array, start filling again from start
- if ( end != -1 ) {
- checkForEnd = true;
- }
- }
+ if ( key.isBatchLoadable() ) {
+ LinkedHashSet set = batchLoadableEntityKeys.get( key.getEntityName());
+ if (set != null) {
+ set.remove(key);
}
-
}
- return keys; //we ran out of keys to try
}
/**
@@ -236,10 +196,11 @@ public class BatchFetchQueue {
int end = -1;
boolean checkForEnd = false;
- Iterator iter = batchLoadableEntityKeys.keySet().iterator();
- while ( iter.hasNext() ) {
- EntityKey key = (EntityKey) iter.next();
- if ( key.getEntityName().equals( persister.getEntityName() ) ) { //TODO: this needn't exclude subclasses...
+ // TODO: this needn't exclude subclasses...
+
+ LinkedHashSet set = batchLoadableEntityKeys.get( persister.getEntityName() );
+ if ( set != null ) {
+ for ( EntityKey key : set ) {
if ( checkForEnd && i == end ) {
//the first id found after the given id
return ids;
@@ -253,8 +214,10 @@ public class BatchFetchQueue {
}
}
if ( i == batchSize ) {
- i = 1; //end of array, start filling again from start
- if (end!=-1) checkForEnd = true;
+ i = 1; // end of array, start filling again from start
+ if ( end != -1 ) {
+ checkForEnd = true;
+ }
}
}
}
@@ -272,6 +235,98 @@ public class BatchFetchQueue {
}
return false;
}
+
+
+ // collection batch support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ /**
+ * If an CollectionEntry represents a batch loadable collection, add
+ * it to the queue.
+ */
+ public void addBatchLoadableCollection(PersistentCollection collection, CollectionEntry ce) {
+ final CollectionPersister persister = ce.getLoadedPersister();
+
+ LinkedHashMap map = batchLoadableCollections.get( persister.getRole() );
+ if ( map == null ) {
+ map = new LinkedHashMap( 16 );
+ batchLoadableCollections.put( persister.getRole(), map );
+ }
+ map.put( ce, collection );
+ }
+
+ /**
+ * After a collection was initialized or evicted, we don't
+ * need to batch fetch it anymore, remove it from the queue
+ * if necessary
+ */
+ public void removeBatchLoadableCollection(CollectionEntry ce) {
+ LinkedHashMap map = batchLoadableCollections.get( ce.getLoadedPersister().getRole() );
+ if ( map != null ) {
+ map.remove( ce );
+ }
+ }
+
+ /**
+ * Get a batch of uninitialized collection keys for a given role
+ *
+ * @param collectionPersister The persister for the collection role.
+ * @param id A key that must be included in the batch fetch
+ * @param batchSize the maximum number of keys to return
+ * @return an array of collection keys, of length batchSize (padded with nulls)
+ */
+ public Serializable[] getCollectionBatch(
+ final CollectionPersister collectionPersister,
+ final Serializable id,
+ final int batchSize) {
+
+ Serializable[] keys = new Serializable[batchSize];
+ keys[0] = id;
+
+ int i = 1;
+ int end = -1;
+ boolean checkForEnd = false;
+
+ final LinkedHashMap map = batchLoadableCollections.get( collectionPersister.getRole() );
+ if ( map != null ) {
+ for ( Entry me : map.entrySet() ) {
+ final CollectionEntry ce = me.getKey();
+ final PersistentCollection collection = me.getValue();
+
+ if ( collection.wasInitialized() ) {
+ // should never happen
+ LOG.warn( "Encountered initialized collection in BatchFetchQueue, this should not happen." );
+ continue;
+ }
+
+ if ( checkForEnd && i == end ) {
+ return keys; //the first key found after the given key
+ }
+
+ final boolean isEqual = collectionPersister.getKeyType().isEqual(
+ id,
+ ce.getLoadedKey(),
+ collectionPersister.getFactory()
+ );
+
+ if ( isEqual ) {
+ end = i;
+ //checkForEnd = false;
+ }
+ else if ( !isCached( ce.getLoadedKey(), collectionPersister ) ) {
+ keys[i++] = ce.getLoadedKey();
+ //count++;
+ }
+
+ if ( i == batchSize ) {
+ i = 1; //end of array, start filling again from start
+ if ( end != -1 ) {
+ checkForEnd = true;
+ }
+ }
+ }
+ }
+ return keys; //we ran out of keys to try
+ }
private boolean isCached(Serializable collectionKey, CollectionPersister persister) {
if ( persister.hasCache() ) {
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java
index 1edb372459..fcd269acba 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java
@@ -34,6 +34,7 @@ import org.jboss.logging.Logger;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
+import org.hibernate.collection.internal.AbstractPersistentCollection;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.collection.CollectionPersister;
@@ -215,6 +216,9 @@ public final class CollectionEntry implements Serializable {
collection.getSnapshot( getLoadedPersister() ) :
null;
collection.setSnapshot(loadedKey, role, snapshot);
+ if (getLoadedPersister().getBatchSize() > 1) {
+ ((AbstractPersistentCollection) collection).getSession().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(this);
+ }
}
/**
@@ -260,6 +264,20 @@ public final class CollectionEntry implements Serializable {
return snapshot;
}
+ /**
+ * Reset the stored snapshot for both the persistent collection and this collection entry.
+ * Used during the merge of detached collections.
+ *
+ * @param collection the persistentcollection to be updated
+ * @param storedSnapshot the new stored snapshot
+ */
+ public void resetStoredSnapshot(PersistentCollection collection, Serializable storedSnapshot) {
+ LOG.debugf("Reset storedSnapshot to %s for %s", storedSnapshot, this);
+
+ snapshot = storedSnapshot;
+ collection.setSnapshot(loadedKey, role, snapshot);
+ }
+
private void setLoadedPersister(CollectionPersister persister) {
loadedPersister = persister;
setRole( persister == null ? null : persister.getRole() );
@@ -418,4 +436,4 @@ public final class CollectionEntry implements Serializable {
( session == null ? null : session.getFactory() )
);
}
-}
\ No newline at end of file
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java
index e245d80695..05ee64c067 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java
@@ -270,8 +270,14 @@ public final class EntityEntry implements Serializable {
}
public Object getLoadedValue(String propertyName) {
- int propertyIndex = ( (UniqueKeyLoadable) persister ).getPropertyIndex(propertyName);
- return loadedState[propertyIndex];
+ if ( loadedState == null ) {
+ return null;
+ }
+ else {
+ int propertyIndex = ( (UniqueKeyLoadable) persister )
+ .getPropertyIndex( propertyName );
+ return loadedState[propertyIndex];
+ }
}
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java
index 66f2643eb4..c5659ad956 100644
--- a/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java
+++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java
@@ -830,6 +830,15 @@ public interface PersistenceContext {
* of old values as no longer valid.
*/
public void cleanupFromSynchronizations();
+
+ /**
+ * Called on {@link org.hibernate.Session#evict} to give a chance to clean up natural-id cross refs.
+ *
+ * @param object The entity instance.
+ * @param persister The entity persister
+ * @param identifier The entity identifier
+ */
+ public void handleEviction(Object object, EntityPersister persister, Serializable identifier);
}
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java
index 8bf320502b..1c58989865 100644
--- a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java
+++ b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java
@@ -80,7 +80,7 @@ public class DefaultEvictEventListener implements EvictEventListener {
if ( !li.isUninitialized() ) {
final Object entity = persistenceContext.removeEntity( key );
if ( entity != null ) {
- EntityEntry e = event.getSession().getPersistenceContext().removeEntry( entity );
+ EntityEntry e = persistenceContext.removeEntry( entity );
doEvict( entity, key, e.getPersister(), event.getSession() );
}
}
@@ -106,6 +106,10 @@ public class DefaultEvictEventListener implements EvictEventListener {
LOG.tracev( "Evicting {0}", MessageHelper.infoString( persister ) );
}
+ if ( persister.hasNaturalIdentifier() ) {
+ session.getPersistenceContext().getNaturalIdHelper().handleEviction( object, persister, key.getIdentifier() );
+ }
+
// remove all collections for the entity from the session-level cache
if ( persister.hasCollections() ) {
new EvictVisitor( session ).process( object, persister );
diff --git a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java
index 69ce7ce476..033682d5df 100755
--- a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java
+++ b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java
@@ -25,8 +25,6 @@ package org.hibernate.event.internal;
import java.io.Serializable;
-import org.jboss.logging.Logger;
-
import org.hibernate.HibernateException;
import org.hibernate.cache.spi.CacheKey;
import org.hibernate.cache.spi.entry.CollectionCacheEntry;
@@ -40,6 +38,7 @@ import org.hibernate.event.spi.InitializeCollectionEventListener;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
+import org.jboss.logging.Logger;
/**
* @author Gavin King
@@ -62,8 +61,7 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
if ( !collection.wasInitialized() ) {
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "Initializing collection {0}",
- MessageHelper.collectionInfoString( ce.getLoadedPersister(), ce.getLoadedKey(),
- source.getFactory() ) );
+ MessageHelper.collectionInfoString( ce.getLoadedPersister(), collection, ce.getLoadedKey(), source ) );
}
LOG.trace( "Checking second-level cache" );
diff --git a/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java b/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java
index dcc18fa019..e05487ed1a 100644
--- a/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java
+++ b/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java
@@ -79,8 +79,12 @@ public class EvictVisitor extends AbstractVisitor {
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Evicting collection: %s",
MessageHelper.collectionInfoString( ce.getLoadedPersister(),
+ collection,
ce.getLoadedKey(),
- getSession().getFactory() ) );
+ getSession() ) );
+ }
+ if (ce.getLoadedPersister() != null && ce.getLoadedPersister().getBatchSize() > 1) {
+ getSession().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(ce);
}
if ( ce.getLoadedPersister() != null && ce.getLoadedKey() != null ) {
//TODO: is this 100% correct?
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java
index 66403b0adf..b9f0bbd8e5 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java
@@ -383,11 +383,15 @@ public final class HqlParser extends HqlBaseParser {
@Override
public void processMemberOf(Token n, AST p, ASTPair currentAST) {
- AST inAst = n == null ? astFactory.create( IN, "in" ) : astFactory.create( NOT_IN, "not in" );
- astFactory.makeASTRoot( currentAST, inAst );
- AST ast = createSubquery( p );
- ast = ASTUtil.createParent( astFactory, IN_LIST, "inList", ast );
- inAst.addChild( ast );
+ // convert MEMBER OF to the equivalent IN ELEMENTS structure...
+ AST inNode = n == null ? astFactory.create( IN, "in" ) : astFactory.create( NOT_IN, "not in" );
+ astFactory.makeASTRoot( currentAST, inNode );
+
+ AST inListNode = astFactory.create( IN_LIST, "inList" );
+ inNode.addChild( inListNode );
+ AST elementsNode = astFactory.create( ELEMENTS, "elements" );
+ inListNode.addChild( elementsNode );
+ elementsNode.addChild( p );
}
static public void panic() {
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java
index 04b7feb2e0..2c2aeaf164 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java
@@ -31,9 +31,12 @@ import java.util.List;
import antlr.RecognitionException;
import org.hibernate.HibernateException;
+import org.hibernate.action.internal.BulkOperationCleanupAction;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.RowSelection;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.event.spi.EventSource;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.internal.ast.QuerySyntaxException;
import org.hibernate.hql.internal.ast.SqlGenerator;
@@ -45,17 +48,17 @@ import org.hibernate.persister.entity.Queryable;
*
* @author Steve Ebersole
*/
-public class BasicExecutor extends AbstractStatementExecutor {
-
+public class BasicExecutor implements StatementExecutor {
+ private final SessionFactoryImplementor factory;
private final Queryable persister;
private final String sql;
private final List parameterSpecifications;
public BasicExecutor(HqlSqlWalker walker, Queryable persister) {
- super(walker, null);
+ this.factory = walker.getSessionFactoryHelper().getFactory();
this.persister = persister;
try {
- SqlGenerator gen = new SqlGenerator( getFactory() );
+ SqlGenerator gen = new SqlGenerator( factory );
gen.statement( walker.getAST() );
sql = gen.getSQL();
gen.getParseErrorHandler().throwQueryException();
@@ -71,8 +74,13 @@ public class BasicExecutor extends AbstractStatementExecutor {
}
public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException {
-
- coordinateSharedCacheCleanup( session );
+ BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, persister );
+ if ( session.isEventSource() ) {
+ ( (EventSource) session ).getActionQueue().addAction( action );
+ }
+ else {
+ action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
+ }
PreparedStatement st = null;
RowSelection selection = parameters.getRowSelection();
@@ -101,16 +109,7 @@ public class BasicExecutor extends AbstractStatementExecutor {
}
}
catch( SQLException sqle ) {
- throw getFactory().getSQLExceptionHelper().convert(
- sqle,
- "could not execute update query",
- sql
- );
+ throw factory.getSQLExceptionHelper().convert( sqle, "could not execute update query", sql );
}
}
-
- @Override
- protected Queryable[] getAffectedQueryables() {
- return new Queryable[] { persister };
- }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java
index 5c2cb36ed6..b0b5a75e22 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,147 +20,46 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.hql.internal.ast.exec;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.Iterator;
-
-import org.jboss.logging.Logger;
-
import org.hibernate.HibernateException;
+import org.hibernate.action.internal.BulkOperationCleanupAction;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.event.spi.EventSource;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
-import org.hibernate.hql.internal.ast.tree.DeleteStatement;
-import org.hibernate.hql.internal.ast.tree.FromElement;
-import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.StringHelper;
-import org.hibernate.param.ParameterSpecification;
-import org.hibernate.persister.entity.Queryable;
-import org.hibernate.sql.Delete;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
/**
* Implementation of MultiTableDeleteExecutor.
*
* @author Steve Ebersole
*/
-public class MultiTableDeleteExecutor extends AbstractStatementExecutor {
-
- private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
- MultiTableDeleteExecutor.class.getName());
-
- private final Queryable persister;
- private final String idInsertSelect;
- private final String[] deletes;
+public class MultiTableDeleteExecutor implements StatementExecutor {
+ private final MultiTableBulkIdStrategy.DeleteHandler deleteHandler;
public MultiTableDeleteExecutor(HqlSqlWalker walker) {
- super(walker, null);
-
- if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) {
- throw new HibernateException( "cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" );
- }
-
- DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST();
- FromElement fromElement = deleteStatement.getFromClause().getFromElement();
- String bulkTargetAlias = fromElement.getTableAlias();
- this.persister = fromElement.getQueryable();
-
- this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, deleteStatement.getWhereClause() );
- LOG.tracev( "Generated ID-INSERT-SELECT SQL (multi-table delete) : {0}", idInsertSelect );
-
- String[] tableNames = persister.getConstraintOrderedTableNameClosure();
- String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure();
- String idSubselect = generateIdSubselect( persister );
-
- deletes = new String[tableNames.length];
- for ( int i = tableNames.length - 1; i >= 0; i-- ) {
- // TODO : an optimization here would be to consider cascade deletes and not gen those delete statements;
- // the difficulty is the ordering of the tables here vs the cascade attributes on the persisters ->
- // the table info gotten here should really be self-contained (i.e., a class representation
- // defining all the needed attributes), then we could then get an array of those
- final Delete delete = new Delete()
- .setTableName( tableNames[i] )
- .setWhere( "(" + StringHelper.join( ", ", columnNames[i] ) + ") IN (" + idSubselect + ")" );
- if ( getFactory().getSettings().isCommentsEnabled() ) {
- delete.setComment( "bulk delete" );
- }
-
- deletes[i] = delete.toStatementString();
- }
+ MultiTableBulkIdStrategy strategy = walker.getSessionFactoryHelper()
+ .getFactory()
+ .getSettings()
+ .getMultiTableBulkIdStrategy();
+ this.deleteHandler = strategy.buildDeleteHandler( walker.getSessionFactoryHelper().getFactory(), walker );
}
public String[] getSqlStatements() {
- return deletes;
+ return deleteHandler.getSqlStatements();
}
public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException {
- coordinateSharedCacheCleanup( session );
-
- createTemporaryTableIfNecessary( persister, session );
-
- try {
- // First, save off the pertinent ids, saving the number of pertinent ids for return
- PreparedStatement ps = null;
- int resultCount = 0;
- try {
- try {
- ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
- Iterator paramSpecifications = getIdSelectParameterSpecifications().iterator();
- int pos = 1;
- while ( paramSpecifications.hasNext() ) {
- final ParameterSpecification paramSpec = ( ParameterSpecification ) paramSpecifications.next();
- pos += paramSpec.bind( ps, parameters, session, pos );
- }
- resultCount = ps.executeUpdate();
- }
- finally {
- if ( ps != null ) {
- ps.close();
- }
- }
- }
- catch( SQLException e ) {
- throw getFactory().getSQLExceptionHelper().convert(
- e,
- "could not insert/select ids for bulk delete",
- idInsertSelect
- );
- }
-
- // Start performing the deletes
- for ( int i = 0; i < deletes.length; i++ ) {
- try {
- try {
- ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( deletes[i], false );
- ps.executeUpdate();
- }
- finally {
- if ( ps != null ) {
- ps.close();
- }
- }
- }
- catch( SQLException e ) {
- throw getFactory().getSQLExceptionHelper().convert(
- e,
- "error performing bulk delete",
- deletes[i]
- );
- }
- }
-
- return resultCount;
+ BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, deleteHandler.getTargetedQueryable() );
+ if ( session.isEventSource() ) {
+ ( (EventSource) session ).getActionQueue().addAction( action );
}
- finally {
- dropTemporaryTableIfNecessary( persister, session );
+ else {
+ action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
}
- }
- @Override
- protected Queryable[] getAffectedQueryables() {
- return new Queryable[] { persister };
+ return deleteHandler.execute( session, parameters );
}
}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java
index b5168bb3cb..b78afe8f3f 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java
@@ -24,178 +24,44 @@
*/
package org.hibernate.hql.internal.ast.exec;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.jboss.logging.Logger;
-
import org.hibernate.HibernateException;
+import org.hibernate.action.internal.BulkOperationCleanupAction;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.event.spi.EventSource;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
-import org.hibernate.hql.internal.ast.tree.AssignmentSpecification;
-import org.hibernate.hql.internal.ast.tree.FromElement;
-import org.hibernate.hql.internal.ast.tree.UpdateStatement;
-import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.StringHelper;
-import org.hibernate.param.ParameterSpecification;
-import org.hibernate.persister.entity.Queryable;
-import org.hibernate.sql.Update;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
/**
* Implementation of MultiTableUpdateExecutor.
*
* @author Steve Ebersole
*/
-public class MultiTableUpdateExecutor extends AbstractStatementExecutor {
-
- private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
- MultiTableUpdateExecutor.class.getName());
-
- private final Queryable persister;
- private final String idInsertSelect;
- private final String[] updates;
- private final ParameterSpecification[][] hqlParameters;
+public class MultiTableUpdateExecutor implements StatementExecutor {
+ private final MultiTableBulkIdStrategy.UpdateHandler updateHandler;
public MultiTableUpdateExecutor(HqlSqlWalker walker) {
- super(walker, null);
-
- if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) {
- throw new HibernateException( "cannot doAfterTransactionCompletion multi-table updates using dialect not supporting temp tables" );
- }
-
- UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST();
- FromElement fromElement = updateStatement.getFromClause().getFromElement();
- String bulkTargetAlias = fromElement.getTableAlias();
- this.persister = fromElement.getQueryable();
-
- this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, updateStatement.getWhereClause() );
- LOG.tracev( "Generated ID-INSERT-SELECT SQL (multi-table update) : {0}", idInsertSelect );
-
- String[] tableNames = persister.getConstraintOrderedTableNameClosure();
- String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure();
-
- String idSubselect = generateIdSubselect( persister );
- List assignmentSpecifications = walker.getAssignmentSpecifications();
-
- updates = new String[tableNames.length];
- hqlParameters = new ParameterSpecification[tableNames.length][];
- for ( int tableIndex = 0; tableIndex < tableNames.length; tableIndex++ ) {
- boolean affected = false;
- List parameterList = new ArrayList();
- Update update = new Update( getFactory().getDialect() )
- .setTableName( tableNames[tableIndex] )
- .setWhere( "(" + StringHelper.join( ", ", columnNames[tableIndex] ) + ") IN (" + idSubselect + ")" );
- if ( getFactory().getSettings().isCommentsEnabled() ) {
- update.setComment( "bulk update" );
- }
- final Iterator itr = assignmentSpecifications.iterator();
- while ( itr.hasNext() ) {
- final AssignmentSpecification specification = ( AssignmentSpecification ) itr.next();
- if ( specification.affectsTable( tableNames[tableIndex] ) ) {
- affected = true;
- update.appendAssignmentFragment( specification.getSqlAssignmentFragment() );
- if ( specification.getParameters() != null ) {
- for ( int paramIndex = 0; paramIndex < specification.getParameters().length; paramIndex++ ) {
- parameterList.add( specification.getParameters()[paramIndex] );
- }
- }
- }
- }
- if ( affected ) {
- updates[tableIndex] = update.toStatementString();
- hqlParameters[tableIndex] = ( ParameterSpecification[] ) parameterList.toArray( new ParameterSpecification[0] );
- }
- }
- }
-
- public Queryable getAffectedQueryable() {
- return persister;
+ MultiTableBulkIdStrategy strategy = walker.getSessionFactoryHelper()
+ .getFactory()
+ .getSettings()
+ .getMultiTableBulkIdStrategy();
+ this.updateHandler = strategy.buildUpdateHandler( walker.getSessionFactoryHelper().getFactory(), walker );
}
public String[] getSqlStatements() {
- return updates;
+ return updateHandler.getSqlStatements();
}
public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException {
- coordinateSharedCacheCleanup( session );
+ BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, updateHandler.getTargetedQueryable() );
- createTemporaryTableIfNecessary( persister, session );
-
- try {
- // First, save off the pertinent ids, as the return value
- PreparedStatement ps = null;
- int resultCount = 0;
- try {
- try {
- ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
-// int parameterStart = getWalker().getNumberOfParametersInSetClause();
-// List allParams = getIdSelectParameterSpecifications();
-// Iterator whereParams = allParams.subList( parameterStart, allParams.size() ).iterator();
- Iterator whereParams = getIdSelectParameterSpecifications().iterator();
- int sum = 1; // jdbc params are 1-based
- while ( whereParams.hasNext() ) {
- sum += ( ( ParameterSpecification ) whereParams.next() ).bind( ps, parameters, session, sum );
- }
- resultCount = ps.executeUpdate();
- }
- finally {
- if ( ps != null ) {
- ps.close();
- }
- }
- }
- catch( SQLException e ) {
- throw getFactory().getSQLExceptionHelper().convert(
- e,
- "could not insert/select ids for bulk update",
- idInsertSelect
- );
- }
-
- // Start performing the updates
- for ( int i = 0; i < updates.length; i++ ) {
- if ( updates[i] == null ) {
- continue;
- }
- try {
- try {
- ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( updates[i], false );
- if ( hqlParameters[i] != null ) {
- int position = 1; // jdbc params are 1-based
- for ( int x = 0; x < hqlParameters[i].length; x++ ) {
- position += hqlParameters[i][x].bind( ps, parameters, session, position );
- }
- }
- ps.executeUpdate();
- }
- finally {
- if ( ps != null ) {
- ps.close();
- }
- }
- }
- catch( SQLException e ) {
- throw getFactory().getSQLExceptionHelper().convert(
- e,
- "error performing bulk update",
- updates[i]
- );
- }
- }
-
- return resultCount;
+ if ( session.isEventSource() ) {
+ ( (EventSource) session ).getActionQueue().addAction( action );
}
- finally {
- dropTemporaryTableIfNecessary( persister, session );
+ else {
+ action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
}
- }
- @Override
- protected Queryable[] getAffectedQueryables() {
- return new Queryable[] { persister };
+ return updateHandler.execute( session, parameters );
}
}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java
index 152eedfb80..f83615af1b 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java
@@ -23,6 +23,8 @@
*/
package org.hibernate.hql.internal.ast.tree;
+import java.util.Arrays;
+
import antlr.SemanticException;
import antlr.collections.AST;
@@ -191,9 +193,7 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
protected static String[] extractMutationTexts(Node operand, int count) {
if ( operand instanceof ParameterNode ) {
String[] rtn = new String[count];
- for ( int i = 0; i < count; i++ ) {
- rtn[i] = "?";
- }
+ Arrays.fill( rtn, "?" );
return rtn;
}
else if ( operand.getType() == HqlSqlTokenTypes.VECTOR_EXPR ) {
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java
index 7be415278f..6bb5e12421 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java
@@ -121,50 +121,70 @@ public class InLogicOperatorNode extends BinaryLogicOperatorNode implements Bina
|| ( !ParameterNode.class.isInstance( getLeftHandOperand() ) ) ? null
: ( (ParameterNode) getLeftHandOperand() )
.getHqlParameterSpecification();
- /**
- * only one element in "in" cluster, e.g.
- * where (a,b) in ( (1,2) ) this will be mutated to
- * where a=1 and b=2
- */
+
+ final boolean negated = getType() == HqlSqlTokenTypes.NOT_IN;
+
if ( rhsNode != null && rhsNode.getNextSibling() == null ) {
- String[] rhsElementTexts = extractMutationTexts( rhsNode,
- rhsColumnSpan );
- setType( HqlSqlTokenTypes.AND );
- setText( "AND" );
- ParameterSpecification rhsEmbeddedCompositeParameterSpecification = rhsNode == null
- || ( !ParameterNode.class.isInstance( rhsNode ) ) ? null
- : ( (ParameterNode) rhsNode )
- .getHqlParameterSpecification();
- translate( lhsColumnSpan, HqlSqlTokenTypes.EQ, "=", lhsElementTexts,
+ /**
+ * only one element in the vector grouping.
+ * where (a,b) in ( (1,2) ) this will be mutated to
+ * where a=1 and b=2
+ */
+ String[] rhsElementTexts = extractMutationTexts( rhsNode, rhsColumnSpan );
+ setType( negated ? HqlTokenTypes.OR : HqlSqlTokenTypes.AND );
+ setText( negated ? "or" : "and" );
+ ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
+ rhsNode == null || ( !ParameterNode.class.isInstance( rhsNode ) )
+ ? null
+ : ( (ParameterNode) rhsNode ).getHqlParameterSpecification();
+ translate(
+ lhsColumnSpan,
+ negated ? HqlSqlTokenTypes.NE : HqlSqlTokenTypes.EQ,
+ negated ? "<>" : "=",
+ lhsElementTexts,
rhsElementTexts,
lhsEmbeddedCompositeParameterSpecification,
- rhsEmbeddedCompositeParameterSpecification, this );
- } else {
+ rhsEmbeddedCompositeParameterSpecification,
+ this
+ );
+ }
+ else {
List andElementsNodeList = new ArrayList();
while ( rhsNode != null ) {
- String[] rhsElementTexts = extractMutationTexts( rhsNode,
- rhsColumnSpan );
- AST and = getASTFactory().create( HqlSqlTokenTypes.AND, "AND" );
- ParameterSpecification rhsEmbeddedCompositeParameterSpecification = rhsNode == null
- || ( !ParameterNode.class.isInstance( rhsNode ) ) ? null
- : ( (ParameterNode) rhsNode )
- .getHqlParameterSpecification();
- translate( lhsColumnSpan, HqlSqlTokenTypes.EQ, "=",
- lhsElementTexts, rhsElementTexts,
+ String[] rhsElementTexts = extractMutationTexts( rhsNode, rhsColumnSpan );
+ AST group = getASTFactory().create(
+ negated ? HqlSqlTokenTypes.OR : HqlSqlTokenTypes.AND,
+ negated ? "or" : "and"
+ );
+ ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
+ rhsNode == null || ( !ParameterNode.class.isInstance( rhsNode ) )
+ ? null
+ : ( (ParameterNode) rhsNode ).getHqlParameterSpecification();
+ translate(
+ lhsColumnSpan,
+ negated ? HqlSqlTokenTypes.NE : HqlSqlTokenTypes.EQ,
+ negated ? "<>" : "=",
+ lhsElementTexts,
+ rhsElementTexts,
lhsEmbeddedCompositeParameterSpecification,
- rhsEmbeddedCompositeParameterSpecification, and );
- andElementsNodeList.add( and );
+ rhsEmbeddedCompositeParameterSpecification,
+ group
+ );
+ andElementsNodeList.add( group );
rhsNode = (Node) rhsNode.getNextSibling();
}
- setType( HqlSqlTokenTypes.OR );
- setText( "OR" );
+ setType( negated ? HqlSqlTokenTypes.AND : HqlSqlTokenTypes.OR );
+ setText( negated ? "and" : "or" );
AST curNode = this;
for ( int i = andElementsNodeList.size() - 1; i > 1; i-- ) {
- AST or = getASTFactory().create( HqlSqlTokenTypes.OR, "OR" );
- curNode.setFirstChild( or );
- curNode = or;
+ AST group = getASTFactory().create(
+ negated ? HqlSqlTokenTypes.AND : HqlSqlTokenTypes.OR,
+ negated ? "and" : "or"
+ );
+ curNode.setFirstChild( group );
+ curNode = group;
AST and = (AST) andElementsNodeList.get( i );
- or.setNextSibling( and );
+ group.setNextSibling( and );
}
AST node0 = (AST) andElementsNodeList.get( 0 );
AST node1 = (AST) andElementsNodeList.get( 1 );
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java
index 6b56e102bb..22c57349b4 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java
@@ -361,7 +361,7 @@ public class SessionFactoryHelper {
* @return The sql function, or null if not found.
*/
public SQLFunction findSQLFunction(String functionName) {
- return sfi.getSqlFunctionRegistry().findSQLFunction( functionName.toLowerCase() );
+ return sfi.getSqlFunctionRegistry().findSQLFunction( functionName );
}
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/AbstractTableBasedBulkIdHandler.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/AbstractTableBasedBulkIdHandler.java
new file mode 100644
index 0000000000..11d2793306
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/AbstractTableBasedBulkIdHandler.java
@@ -0,0 +1,184 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.hql.spi;
+
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.List;
+
+import antlr.RecognitionException;
+import antlr.collections.AST;
+
+import org.hibernate.HibernateException;
+import org.hibernate.JDBCException;
+import org.hibernate.engine.jdbc.spi.JdbcServices;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.hql.internal.ast.HqlSqlWalker;
+import org.hibernate.hql.internal.ast.SqlGenerator;
+import org.hibernate.internal.util.StringHelper;
+import org.hibernate.mapping.Table;
+import org.hibernate.param.ParameterSpecification;
+import org.hibernate.persister.entity.Queryable;
+import org.hibernate.sql.InsertSelect;
+import org.hibernate.sql.Select;
+import org.hibernate.sql.SelectValues;
+
+/**
+ * @author Steve Ebersole
+ */
+public class AbstractTableBasedBulkIdHandler {
+ private final SessionFactoryImplementor sessionFactory;
+ private final HqlSqlWalker walker;
+
+ private final String catalog;
+ private final String schema;
+
+ public AbstractTableBasedBulkIdHandler(
+ SessionFactoryImplementor sessionFactory,
+ HqlSqlWalker walker,
+ String catalog,
+ String schema) {
+ this.sessionFactory = sessionFactory;
+ this.walker = walker;
+ this.catalog = catalog;
+ this.schema = schema;
+ }
+
+ protected SessionFactoryImplementor factory() {
+ return sessionFactory;
+ }
+
+ protected HqlSqlWalker walker() {
+ return walker;
+ }
+
+ protected JDBCException convert(SQLException e, String message, String sql) {
+ throw factory().getSQLExceptionHelper().convert( e, message, sql );
+ }
+
+ protected static class ProcessedWhereClause {
+ public static final ProcessedWhereClause NO_WHERE_CLAUSE = new ProcessedWhereClause();
+
+ private final String userWhereClauseFragment;
+ private final List idSelectParameterSpecifications;
+
+ private ProcessedWhereClause() {
+ this( "", Collections.emptyList() );
+ }
+
+ public ProcessedWhereClause(String userWhereClauseFragment, List idSelectParameterSpecifications) {
+ this.userWhereClauseFragment = userWhereClauseFragment;
+ this.idSelectParameterSpecifications = idSelectParameterSpecifications;
+ }
+
+ public String getUserWhereClauseFragment() {
+ return userWhereClauseFragment;
+ }
+
+ public List getIdSelectParameterSpecifications() {
+ return idSelectParameterSpecifications;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ protected ProcessedWhereClause processWhereClause(AST whereClause) {
+ if ( whereClause.getNumberOfChildren() != 0 ) {
+ // If a where clause was specified in the update/delete query, use it to limit the
+ // returned ids here...
+ try {
+ SqlGenerator sqlGenerator = new SqlGenerator( sessionFactory );
+ sqlGenerator.whereClause( whereClause );
+ String userWhereClause = sqlGenerator.getSQL().substring( 7 ); // strip the " where "
+ List idSelectParameterSpecifications = sqlGenerator.getCollectedParameters();
+
+ return new ProcessedWhereClause( userWhereClause, idSelectParameterSpecifications );
+ }
+ catch ( RecognitionException e ) {
+ throw new HibernateException( "Unable to generate id select for DML operation", e );
+ }
+ }
+ else {
+ return ProcessedWhereClause.NO_WHERE_CLAUSE;
+ }
+ }
+
+ protected String generateIdInsertSelect(Queryable persister, String tableAlias, ProcessedWhereClause whereClause) {
+ Select select = new Select( sessionFactory.getDialect() );
+ SelectValues selectClause = new SelectValues( sessionFactory.getDialect() )
+ .addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() );
+ addAnyExtraIdSelectValues( selectClause );
+ select.setSelectClause( selectClause.render() );
+
+ String rootTableName = persister.getTableName();
+ String fromJoinFragment = persister.fromJoinFragment( tableAlias, true, false );
+ String whereJoinFragment = persister.whereJoinFragment( tableAlias, true, false );
+
+ select.setFromClause( rootTableName + ' ' + tableAlias + fromJoinFragment );
+
+ if ( whereJoinFragment == null ) {
+ whereJoinFragment = "";
+ }
+ else {
+ whereJoinFragment = whereJoinFragment.trim();
+ if ( whereJoinFragment.startsWith( "and" ) ) {
+ whereJoinFragment = whereJoinFragment.substring( 4 );
+ }
+ }
+
+ if ( whereClause.getUserWhereClauseFragment().length() > 0 ) {
+ if ( whereJoinFragment.length() > 0 ) {
+ whereJoinFragment += " and ";
+ }
+ }
+ select.setWhereClause( whereJoinFragment + whereClause.getUserWhereClauseFragment() );
+
+ InsertSelect insert = new InsertSelect( sessionFactory.getDialect() );
+ if ( sessionFactory.getSettings().isCommentsEnabled() ) {
+ insert.setComment( "insert-select for " + persister.getEntityName() + " ids" );
+ }
+ insert.setTableName( determineIdTableName( persister ) );
+ insert.setSelect( select );
+ return insert.toStatementString();
+ }
+
+ protected void addAnyExtraIdSelectValues(SelectValues selectClause) {
+ }
+
+ protected String determineIdTableName(Queryable persister) {
+ // todo : use the identifier/name qualifier service once we pull that over to master
+ return Table.qualify( catalog, schema, persister.getTemporaryIdTableName() );
+ }
+
+ protected String generateIdSubselect(Queryable persister) {
+ return "select " + StringHelper.join( ", ", persister.getIdentifierColumnNames() ) +
+ " from " + determineIdTableName( persister );
+ }
+
+ protected void prepareForUse(Queryable persister, SessionImplementor session) {
+ }
+
+ protected void releaseFromUse(Queryable persister, SessionImplementor session) {
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/MultiTableBulkIdStrategy.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/MultiTableBulkIdStrategy.java
new file mode 100644
index 0000000000..30962d7171
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/MultiTableBulkIdStrategy.java
@@ -0,0 +1,105 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.hql.spi;
+
+import java.util.Map;
+
+import org.hibernate.cfg.Mappings;
+import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
+import org.hibernate.engine.jdbc.spi.JdbcServices;
+import org.hibernate.engine.spi.Mapping;
+import org.hibernate.engine.spi.QueryParameters;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.hql.internal.ast.HqlSqlWalker;
+import org.hibernate.persister.entity.Queryable;
+
+/**
+ * Generalized strategy contract for handling multi-table bulk HQL operations.
+ *
+ * @author Steve Ebersole
+ */
+public interface MultiTableBulkIdStrategy {
+ /**
+ * Prepare the strategy. Called as the SessionFactory is being built. Intended patterns here include:
+ * <ul>
+ *     <li>Adding tables to the passed Mappings, to be picked up by "schema management tools"</li>
+ *     <li>Manually creating the tables immediately through the passed JDBC Connection access</li>
+ * </ul>
+ *
+ *
+ * @param jdbcServices The JdbcService object
+ * @param connectionAccess Access to the JDBC Connection
+ * @param mappings The Hibernate Mappings object, for access to O/RM mapping information
+ * @param mapping The Hibernate Mapping contract, mainly for use in DDL generation
+ * @param settings Configuration settings
+ */
+ public void prepare(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess, Mappings mappings, Mapping mapping, Map settings);
+
+ /**
+ * Release the strategy. Called as the SessionFactory is being shut down.
+ *
+ * @param jdbcServices The JdbcService object
+ * @param connectionAccess Access to the JDBC Connection
+ */
+ public void release(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess);
+
+ /**
+ * Handler for dealing with multi-table HQL bulk update statements.
+ */
+ public static interface UpdateHandler {
+ public Queryable getTargetedQueryable();
+ public String[] getSqlStatements();
+
+ public int execute(SessionImplementor session, QueryParameters queryParameters);
+ }
+
+ /**
+ * Build a handler capable of handling the bulk update indicated by the given walker.
+ *
+ * @param factory The SessionFactory
+ * @param walker The AST walker, representing the update query
+ *
+ * @return The handler
+ */
+ public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker);
+
+ /**
+ * Handler for dealing with multi-table HQL bulk delete statements.
+ */
+ public static interface DeleteHandler {
+ public Queryable getTargetedQueryable();
+ public String[] getSqlStatements();
+
+ public int execute(SessionImplementor session, QueryParameters queryParameters);
+ }
+
+ /**
+ * Build a handler capable of handling the bulk delete indicated by the given walker.
+ *
+ * @param factory The SessionFactory
+ * @param walker The AST walker, representing the delete query
+ *
+ * @return The handler
+ */
+ public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker);
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/PersistentTableBulkIdStrategy.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/PersistentTableBulkIdStrategy.java
new file mode 100644
index 0000000000..b328f1136d
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/PersistentTableBulkIdStrategy.java
@@ -0,0 +1,322 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.hql.spi;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.hibernate.HibernateException;
+import org.hibernate.JDBCException;
+import org.hibernate.cfg.AvailableSettings;
+import org.hibernate.cfg.Mappings;
+import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
+import org.hibernate.engine.jdbc.spi.JdbcServices;
+import org.hibernate.engine.spi.Mapping;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.hql.internal.ast.HqlSqlWalker;
+import org.hibernate.internal.AbstractSessionImpl;
+import org.hibernate.internal.CoreMessageLogger;
+import org.hibernate.internal.util.config.ConfigurationHelper;
+import org.hibernate.mapping.Column;
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.mapping.Table;
+import org.hibernate.persister.entity.Queryable;
+import org.hibernate.sql.SelectValues;
+import org.hibernate.type.UUIDCharType;
+import org.jboss.logging.Logger;
+
+/**
+ * @author Steve Ebersole
+ */
+public class PersistentTableBulkIdStrategy implements MultiTableBulkIdStrategy {
+ private static final CoreMessageLogger log = Logger.getMessageLogger(
+ CoreMessageLogger.class,
+ PersistentTableBulkIdStrategy.class.getName()
+ );
+
+ public static final String SHORT_NAME = "persistent";
+
+ public static final String CLEAN_UP_ID_TABLES = "hibernate.hql.bulk_id_strategy.persistent.clean_up";
+ public static final String SCHEMA = "hibernate.hql.bulk_id_strategy.persistent.schema";
+ public static final String CATALOG = "hibernate.hql.bulk_id_strategy.persistent.catalog";
+
+ private String catalog;
+ private String schema;
+ private boolean cleanUpTables;
+	private List<String> tableCleanUpDdl;
+
+ @Override
+ public void prepare(
+ JdbcServices jdbcServices,
+ JdbcConnectionAccess connectionAccess,
+ Mappings mappings,
+ Mapping mapping,
+ Map settings) {
+ this.catalog = ConfigurationHelper.getString(
+ CATALOG,
+ settings,
+ ConfigurationHelper.getString( AvailableSettings.DEFAULT_CATALOG, settings )
+ );
+ this.schema = ConfigurationHelper.getString(
+ SCHEMA,
+ settings,
+ ConfigurationHelper.getString( AvailableSettings.DEFAULT_SCHEMA, settings )
+ );
+ this.cleanUpTables = ConfigurationHelper.getBoolean( CLEAN_UP_ID_TABLES, settings, false );
+
+		final Iterator<PersistentClass> entityMappings = mappings.iterateClasses();
+		final List<Table> idTableDefinitions = new ArrayList<Table>();
+ while ( entityMappings.hasNext() ) {
+ final PersistentClass entityMapping = entityMappings.next();
+ final Table idTableDefinition = generateIdTableDefinition( entityMapping );
+ idTableDefinitions.add( idTableDefinition );
+ }
+ exportTableDefinitions( idTableDefinitions, jdbcServices, connectionAccess, mappings, mapping );
+ }
+
+ protected Table generateIdTableDefinition(PersistentClass entityMapping) {
+ Table idTable = new Table( entityMapping.getTemporaryIdTableName() );
+ if ( catalog != null ) {
+ idTable.setCatalog( catalog );
+ }
+ if ( schema != null ) {
+ idTable.setSchema( schema );
+ }
+ Iterator itr = entityMapping.getTable().getPrimaryKey().getColumnIterator();
+ while( itr.hasNext() ) {
+ Column column = (Column) itr.next();
+ idTable.addColumn( column.clone() );
+ }
+ Column sessionIdColumn = new Column( "hib_sess_id" );
+ sessionIdColumn.setSqlType( "CHAR(36)" );
+ sessionIdColumn.setComment( "Used to hold the Hibernate Session identifier" );
+ idTable.addColumn( sessionIdColumn );
+
+ idTable.setComment( "Used to hold id values for the " + entityMapping.getEntityName() + " class" );
+ return idTable;
+ }
+
+ protected void exportTableDefinitions(
+			List<Table> idTableDefinitions,
+ JdbcServices jdbcServices,
+ JdbcConnectionAccess connectionAccess,
+ Mappings mappings,
+ Mapping mapping) {
+ try {
+ Connection connection;
+ try {
+ connection = connectionAccess.obtainConnection();
+ }
+ catch (UnsupportedOperationException e) {
+ // assume this comes from org.hibernate.engine.jdbc.connections.internal.UserSuppliedConnectionProviderImpl
+ log.debug( "Unable to obtain JDBC connection; assuming ID tables already exist or wont be needed" );
+ return;
+ }
+
+ try {
+ Statement statement = connection.createStatement();
+
+ for ( Table idTableDefinition : idTableDefinitions ) {
+ if ( cleanUpTables ) {
+ if ( tableCleanUpDdl == null ) {
+						tableCleanUpDdl = new ArrayList<String>();
+ }
+ tableCleanUpDdl.add( idTableDefinition.sqlDropString( jdbcServices.getDialect(), null, null ) );
+ }
+ try {
+ final String sql = idTableDefinition.sqlCreateString( jdbcServices.getDialect(), mapping, null, null );
+ jdbcServices.getSqlStatementLogger().logStatement( sql );
+ statement.execute( sql );
+ }
+ catch (SQLException e) {
+ log.debugf( "Error attempting to export id-table [%s] : %s", idTableDefinition.getName(), e.getMessage() );
+ }
+ }
+
+ statement.close();
+ }
+ catch (SQLException e) {
+ log.error( "Unable to use JDBC Connection to create Statement", e );
+ }
+ finally {
+ try {
+ connectionAccess.releaseConnection( connection );
+ }
+ catch (SQLException ignore) {
+ }
+ }
+ }
+ catch (SQLException e) {
+ log.error( "Unable obtain JDBC Connection", e );
+ }
+ }
+
+ @Override
+ public void release(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess) {
+ if ( ! cleanUpTables || tableCleanUpDdl == null ) {
+ return;
+ }
+
+ try {
+ Connection connection = connectionAccess.obtainConnection();
+
+ try {
+ Statement statement = connection.createStatement();
+
+ for ( String cleanupDdl : tableCleanUpDdl ) {
+ try {
+ jdbcServices.getSqlStatementLogger().logStatement( cleanupDdl );
+ statement.execute( cleanupDdl );
+ }
+ catch (SQLException e) {
+ log.debugf( "Error attempting to cleanup id-table : [%s]", e.getMessage() );
+ }
+ }
+
+ statement.close();
+ }
+ catch (SQLException e) {
+ log.error( "Unable to use JDBC Connection to create Statement", e );
+ }
+ finally {
+ try {
+ connectionAccess.releaseConnection( connection );
+ }
+ catch (SQLException ignore) {
+ }
+ }
+ }
+ catch (SQLException e) {
+ log.error( "Unable obtain JDBC Connection", e );
+ }
+ }
+
+ @Override
+ public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+ return new TableBasedUpdateHandlerImpl( factory, walker, catalog, schema ) {
+ @Override
+ protected void addAnyExtraIdSelectValues(SelectValues selectClause) {
+ selectClause.addParameter( Types.CHAR, 36 );
+ }
+
+ @Override
+ protected String generateIdSubselect(Queryable persister) {
+ return super.generateIdSubselect( persister ) + " where hib_sess_id=?";
+ }
+
+ @Override
+ protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
+ bindSessionIdentifier( ps, session, pos );
+ return 1;
+ }
+
+ @Override
+ protected void handleAddedParametersOnUpdate(PreparedStatement ps, SessionImplementor session, int position) throws SQLException {
+ bindSessionIdentifier( ps, session, position );
+ }
+
+ @Override
+ protected void releaseFromUse(Queryable persister, SessionImplementor session) {
+ // clean up our id-table rows
+ cleanUpRows( determineIdTableName( persister ), session );
+ }
+ };
+ }
+
+ private void bindSessionIdentifier(PreparedStatement ps, SessionImplementor session, int position) throws SQLException {
+ if ( ! AbstractSessionImpl.class.isInstance( session ) ) {
+ throw new HibernateException( "Only available on SessionImpl instances" );
+ }
+ UUIDCharType.INSTANCE.set( ps, ( (AbstractSessionImpl) session ).getSessionIdentifier(), position, session );
+ }
+
+ private void cleanUpRows(String tableName, SessionImplementor session) {
+ final String sql = "delete from " + tableName + " where hib_sess_id=?";
+ try {
+ PreparedStatement ps = null;
+ try {
+ ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false );
+ bindSessionIdentifier( ps, session, 1 );
+ ps.executeUpdate();
+ }
+ finally {
+ if ( ps != null ) {
+ try {
+ ps.close();
+ }
+ catch( Throwable ignore ) {
+ // ignore
+ }
+ }
+ }
+ }
+ catch (SQLException e) {
+ throw convert( session.getFactory(), e, "Unable to clean up id table [" + tableName + "]", sql );
+ }
+ }
+
+ protected JDBCException convert(SessionFactoryImplementor factory, SQLException e, String message, String sql) {
+ throw factory.getSQLExceptionHelper().convert( e, message, sql );
+ }
+
+ @Override
+ public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+ return new TableBasedDeleteHandlerImpl( factory, walker, catalog, schema ) {
+ @Override
+ protected void addAnyExtraIdSelectValues(SelectValues selectClause) {
+ selectClause.addParameter( Types.CHAR, 36 );
+ }
+
+ @Override
+ protected String generateIdSubselect(Queryable persister) {
+ return super.generateIdSubselect( persister ) + " where hib_sess_id=?";
+ }
+
+ @Override
+ protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
+ bindSessionIdentifier( ps, session, pos );
+ return 1;
+ }
+
+ @Override
+ protected void handleAddedParametersOnDelete(PreparedStatement ps, SessionImplementor session) throws SQLException {
+ bindSessionIdentifier( ps, session, 1 );
+ }
+
+ @Override
+ protected void releaseFromUse(Queryable persister, SessionImplementor session) {
+ // clean up our id-table rows
+ cleanUpRows( determineIdTableName( persister ), session );
+ }
+ };
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedDeleteHandlerImpl.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedDeleteHandlerImpl.java
new file mode 100644
index 0000000000..8b51d537b3
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedDeleteHandlerImpl.java
@@ -0,0 +1,172 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.hql.spi;
+
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.jboss.logging.Logger;
+
+import org.hibernate.engine.spi.QueryParameters;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.hql.internal.ast.HqlSqlWalker;
+import org.hibernate.hql.internal.ast.tree.DeleteStatement;
+import org.hibernate.hql.internal.ast.tree.FromElement;
+import org.hibernate.internal.util.StringHelper;
+import org.hibernate.param.ParameterSpecification;
+import org.hibernate.persister.entity.Queryable;
+import org.hibernate.sql.Delete;
+
+/**
+* @author Steve Ebersole
+*/
+public class TableBasedDeleteHandlerImpl
+ extends AbstractTableBasedBulkIdHandler
+ implements MultiTableBulkIdStrategy.DeleteHandler {
+ private static final Logger log = Logger.getLogger( TableBasedDeleteHandlerImpl.class );
+
+ private final Queryable targetedPersister;
+
+ private final String idInsertSelect;
+	private final List<ParameterSpecification> idSelectParameterSpecifications;
+ private final String[] deletes;
+
+ public TableBasedDeleteHandlerImpl(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+ this( factory, walker, null, null );
+ }
+
+ public TableBasedDeleteHandlerImpl(
+ SessionFactoryImplementor factory,
+ HqlSqlWalker walker,
+ String catalog,
+ String schema) {
+ super( factory, walker, catalog, schema );
+
+ DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST();
+ FromElement fromElement = deleteStatement.getFromClause().getFromElement();
+
+ this.targetedPersister = fromElement.getQueryable();
+ final String bulkTargetAlias = fromElement.getTableAlias();
+
+ final ProcessedWhereClause processedWhereClause = processWhereClause( deleteStatement.getWhereClause() );
+ this.idSelectParameterSpecifications = processedWhereClause.getIdSelectParameterSpecifications();
+ this.idInsertSelect = generateIdInsertSelect( targetedPersister, bulkTargetAlias, processedWhereClause );
+ log.tracev( "Generated ID-INSERT-SELECT SQL (multi-table delete) : {0}", idInsertSelect );
+
+ String[] tableNames = targetedPersister.getConstraintOrderedTableNameClosure();
+ String[][] columnNames = targetedPersister.getContraintOrderedTableKeyColumnClosure();
+ String idSubselect = generateIdSubselect( targetedPersister );
+
+ deletes = new String[tableNames.length];
+ for ( int i = tableNames.length - 1; i >= 0; i-- ) {
+ // TODO : an optimization here would be to consider cascade deletes and not gen those delete statements;
+ // the difficulty is the ordering of the tables here vs the cascade attributes on the persisters ->
+ // the table info gotten here should really be self-contained (i.e., a class representation
+ // defining all the needed attributes), then we could then get an array of those
+ final Delete delete = new Delete()
+ .setTableName( tableNames[i] )
+ .setWhere( "(" + StringHelper.join( ", ", columnNames[i] ) + ") IN (" + idSubselect + ")" );
+ if ( factory().getSettings().isCommentsEnabled() ) {
+ delete.setComment( "bulk delete" );
+ }
+
+ deletes[i] = delete.toStatementString();
+ }
+ }
+
+ @Override
+ public Queryable getTargetedQueryable() {
+ return targetedPersister;
+ }
+
+ @Override
+ public String[] getSqlStatements() {
+ return deletes;
+ }
+
+ @Override
+ public int execute(SessionImplementor session, QueryParameters queryParameters) {
+ prepareForUse( targetedPersister, session );
+ try {
+ PreparedStatement ps = null;
+ int resultCount = 0;
+ try {
+ try {
+ ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
+ int pos = 1;
+ pos += handlePrependedParametersOnIdSelection( ps, session, pos );
+ for ( ParameterSpecification parameterSpecification : idSelectParameterSpecifications ) {
+ pos += parameterSpecification.bind( ps, queryParameters, session, pos );
+ }
+ resultCount = ps.executeUpdate();
+ }
+ finally {
+ if ( ps != null ) {
+ ps.close();
+ }
+ }
+ }
+ catch( SQLException e ) {
+ throw convert( e, "could not insert/select ids for bulk delete", idInsertSelect );
+ }
+
+ // Start performing the deletes
+ for ( String delete : deletes ) {
+ try {
+ try {
+ ps = session.getTransactionCoordinator()
+ .getJdbcCoordinator()
+ .getStatementPreparer()
+ .prepareStatement( delete, false );
+ handleAddedParametersOnDelete( ps, session );
+ ps.executeUpdate();
+ }
+ finally {
+ if ( ps != null ) {
+ ps.close();
+ }
+ }
+ }
+ catch (SQLException e) {
+ throw convert( e, "error performing bulk delete", delete );
+ }
+ }
+
+ return resultCount;
+
+ }
+ finally {
+ releaseFromUse( targetedPersister, session );
+ }
+ }
+
+ protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
+ return 0;
+ }
+
+ protected void handleAddedParametersOnDelete(PreparedStatement ps, SessionImplementor session) throws SQLException {
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedUpdateHandlerImpl.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedUpdateHandlerImpl.java
new file mode 100644
index 0000000000..5b2a990213
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedUpdateHandlerImpl.java
@@ -0,0 +1,198 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.hql.spi;
+
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.jboss.logging.Logger;
+
+import org.hibernate.engine.spi.QueryParameters;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.hql.internal.ast.HqlSqlWalker;
+import org.hibernate.hql.internal.ast.tree.AssignmentSpecification;
+import org.hibernate.hql.internal.ast.tree.FromElement;
+import org.hibernate.hql.internal.ast.tree.UpdateStatement;
+import org.hibernate.internal.util.StringHelper;
+import org.hibernate.param.ParameterSpecification;
+import org.hibernate.persister.entity.Queryable;
+import org.hibernate.sql.Update;
+
+/**
+* @author Steve Ebersole
+*/
+public class TableBasedUpdateHandlerImpl
+ extends AbstractTableBasedBulkIdHandler
+ implements MultiTableBulkIdStrategy.UpdateHandler {
+
+ private static final Logger log = Logger.getLogger( TableBasedUpdateHandlerImpl.class );
+
+ private final Queryable targetedPersister;
+
+ private final String idInsertSelect;
+	private final List<ParameterSpecification> idSelectParameterSpecifications;
+
+ private final String[] updates;
+ private final ParameterSpecification[][] assignmentParameterSpecifications;
+
+ @SuppressWarnings("unchecked")
+ public TableBasedUpdateHandlerImpl(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+ this( factory, walker, null, null );
+ }
+
+ public TableBasedUpdateHandlerImpl(
+ SessionFactoryImplementor factory,
+ HqlSqlWalker walker,
+ String catalog,
+ String schema) {
+ super( factory, walker, catalog, schema );
+
+ UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST();
+ FromElement fromElement = updateStatement.getFromClause().getFromElement();
+
+ this.targetedPersister = fromElement.getQueryable();
+ final String bulkTargetAlias = fromElement.getTableAlias();
+
+ final ProcessedWhereClause processedWhereClause = processWhereClause( updateStatement.getWhereClause() );
+ this.idSelectParameterSpecifications = processedWhereClause.getIdSelectParameterSpecifications();
+ this.idInsertSelect = generateIdInsertSelect( targetedPersister, bulkTargetAlias, processedWhereClause );
+ log.tracev( "Generated ID-INSERT-SELECT SQL (multi-table update) : {0}", idInsertSelect );
+
+ String[] tableNames = targetedPersister.getConstraintOrderedTableNameClosure();
+ String[][] columnNames = targetedPersister.getContraintOrderedTableKeyColumnClosure();
+ String idSubselect = generateIdSubselect( targetedPersister );
+
+ updates = new String[tableNames.length];
+ assignmentParameterSpecifications = new ParameterSpecification[tableNames.length][];
+ for ( int tableIndex = 0; tableIndex < tableNames.length; tableIndex++ ) {
+ boolean affected = false;
+			final List<ParameterSpecification> parameterList = new ArrayList<ParameterSpecification>();
+ final Update update = new Update( factory().getDialect() )
+ .setTableName( tableNames[tableIndex] )
+ .setWhere( "(" + StringHelper.join( ", ", columnNames[tableIndex] ) + ") IN (" + idSubselect + ")" );
+ if ( factory().getSettings().isCommentsEnabled() ) {
+ update.setComment( "bulk update" );
+ }
+			final List<AssignmentSpecification> assignmentSpecifications = walker.getAssignmentSpecifications();
+ for ( AssignmentSpecification assignmentSpecification : assignmentSpecifications ) {
+ if ( assignmentSpecification.affectsTable( tableNames[tableIndex] ) ) {
+ affected = true;
+ update.appendAssignmentFragment( assignmentSpecification.getSqlAssignmentFragment() );
+ if ( assignmentSpecification.getParameters() != null ) {
+ for ( int paramIndex = 0; paramIndex < assignmentSpecification.getParameters().length; paramIndex++ ) {
+ parameterList.add( assignmentSpecification.getParameters()[paramIndex] );
+ }
+ }
+ }
+ }
+ if ( affected ) {
+ updates[tableIndex] = update.toStatementString();
+ assignmentParameterSpecifications[tableIndex] = parameterList.toArray( new ParameterSpecification[parameterList.size()] );
+ }
+ }
+ }
+
+ @Override
+ public Queryable getTargetedQueryable() {
+ return targetedPersister;
+ }
+
+ @Override
+ public String[] getSqlStatements() {
+ return updates;
+ }
+
+ @Override
+ public int execute(SessionImplementor session, QueryParameters queryParameters) {
+ prepareForUse( targetedPersister, session );
+ try {
+ // First, save off the pertinent ids, as the return value
+ PreparedStatement ps = null;
+ int resultCount = 0;
+ try {
+ try {
+ ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
+ int sum = 1;
+ sum += handlePrependedParametersOnIdSelection( ps, session, sum );
+ for ( ParameterSpecification parameterSpecification : idSelectParameterSpecifications ) {
+ sum += parameterSpecification.bind( ps, queryParameters, session, sum );
+ }
+ resultCount = ps.executeUpdate();
+ }
+ finally {
+ if ( ps != null ) {
+ ps.close();
+ }
+ }
+ }
+ catch( SQLException e ) {
+ throw convert( e, "could not insert/select ids for bulk update", idInsertSelect );
+ }
+
+ // Start performing the updates
+ for ( int i = 0; i < updates.length; i++ ) {
+ if ( updates[i] == null ) {
+ continue;
+ }
+ try {
+ try {
+ ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( updates[i], false );
+ if ( assignmentParameterSpecifications[i] != null ) {
+ int position = 1; // jdbc params are 1-based
+ for ( int x = 0; x < assignmentParameterSpecifications[i].length; x++ ) {
+ position += assignmentParameterSpecifications[i][x].bind( ps, queryParameters, session, position );
+ }
+ handleAddedParametersOnUpdate( ps, session, position );
+ }
+ ps.executeUpdate();
+ }
+ finally {
+ if ( ps != null ) {
+ ps.close();
+ }
+ }
+ }
+ catch( SQLException e ) {
+ throw convert( e, "error performing bulk update", updates[i] );
+ }
+ }
+
+ return resultCount;
+ }
+ finally {
+ releaseFromUse( targetedPersister, session );
+ }
+ }
+
+ protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
+ return 0;
+ }
+
+ protected void handleAddedParametersOnUpdate(PreparedStatement ps, SessionImplementor session, int position) throws SQLException {
+		// No-op by default; subclasses override to bind any parameters appended to the update statement.
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/AbstractStatementExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/TemporaryTableBulkIdStrategy.java
similarity index 50%
rename from hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/AbstractStatementExecutor.java
rename to hibernate-core/src/main/java/org/hibernate/hql/spi/TemporaryTableBulkIdStrategy.java
index d745bac252..e3a57c0319 100644
--- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/AbstractStatementExecutor.java
+++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/TemporaryTableBulkIdStrategy.java
@@ -1,7 +1,7 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
@@ -21,122 +21,156 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
-package org.hibernate.hql.internal.ast.exec;
+package org.hibernate.hql.spi;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLWarning;
import java.sql.Statement;
-import java.util.Collections;
-import java.util.List;
+import java.util.Map;
-import antlr.RecognitionException;
-import antlr.collections.AST;
-import org.jboss.logging.Logger;
-
-import org.hibernate.HibernateException;
-import org.hibernate.action.internal.BulkOperationCleanupAction;
+import org.hibernate.cfg.Mappings;
+import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
+import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
-import org.hibernate.event.spi.EventSource;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
-import org.hibernate.hql.internal.ast.SqlGenerator;
import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.StringHelper;
import org.hibernate.jdbc.AbstractWork;
import org.hibernate.persister.entity.Queryable;
-import org.hibernate.sql.InsertSelect;
-import org.hibernate.sql.Select;
-import org.hibernate.sql.SelectFragment;
+import org.jboss.logging.Logger;
/**
- * Implementation of AbstractStatementExecutor.
- *
* @author Steve Ebersole
*/
-public abstract class AbstractStatementExecutor implements StatementExecutor {
+public class TemporaryTableBulkIdStrategy implements MultiTableBulkIdStrategy {
+ public static final TemporaryTableBulkIdStrategy INSTANCE = new TemporaryTableBulkIdStrategy();
- private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
- AbstractStatementExecutor.class.getName());
+ public static final String SHORT_NAME = "temporary";
- private final HqlSqlWalker walker;
- private List idSelectParameterSpecifications = Collections.EMPTY_LIST;
+ private static final CoreMessageLogger log = Logger.getMessageLogger(
+ CoreMessageLogger.class,
+ TemporaryTableBulkIdStrategy.class.getName()
+ );
- public AbstractStatementExecutor( HqlSqlWalker walker,
- CoreMessageLogger log ) {
- this.walker = walker;
+ @Override
+ public void prepare(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess, Mappings mappings, Mapping mapping, Map settings) {
+ // nothing to do
}
- protected HqlSqlWalker getWalker() {
- return walker;
+ @Override
+ public void release(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess) {
+ // nothing to do
}
- protected SessionFactoryImplementor getFactory() {
- return walker.getSessionFactoryHelper().getFactory();
+ @Override
+ public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+ return new TableBasedUpdateHandlerImpl( factory, walker ) {
+ @Override
+ protected void prepareForUse(Queryable persister, SessionImplementor session) {
+ createTempTable( persister, session );
+ }
+
+ @Override
+ protected void releaseFromUse(Queryable persister, SessionImplementor session) {
+ releaseTempTable( persister, session );
+ }
+ };
}
- protected List getIdSelectParameterSpecifications() {
- return idSelectParameterSpecifications;
+ @Override
+ public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+ return new TableBasedDeleteHandlerImpl( factory, walker ) {
+ @Override
+ protected void prepareForUse(Queryable persister, SessionImplementor session) {
+ createTempTable( persister, session );
+ }
+
+ @Override
+ protected void releaseFromUse(Queryable persister, SessionImplementor session) {
+ releaseTempTable( persister, session );
+ }
+ };
}
- protected abstract Queryable[] getAffectedQueryables();
- protected String generateIdInsertSelect(Queryable persister, String tableAlias, AST whereClause) {
- Select select = new Select( getFactory().getDialect() );
- SelectFragment selectFragment = new SelectFragment()
- .addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() );
- select.setSelectClause( selectFragment.toFragmentString().substring( 2 ) );
-
- String rootTableName = persister.getTableName();
- String fromJoinFragment = persister.fromJoinFragment( tableAlias, true, false );
- String whereJoinFragment = persister.whereJoinFragment( tableAlias, true, false );
-
- select.setFromClause( rootTableName + ' ' + tableAlias + fromJoinFragment );
-
- if ( whereJoinFragment == null ) {
- whereJoinFragment = "";
+ protected void createTempTable(Queryable persister, SessionImplementor session) {
+ // Don't really know all the codes required to adequately decipher returned jdbc exceptions here.
+ // simply allow the failure to be eaten and the subsequent insert-selects/deletes should fail
+ TemporaryTableCreationWork work = new TemporaryTableCreationWork( persister );
+ if ( shouldIsolateTemporaryTableDDL( session ) ) {
+ session.getTransactionCoordinator()
+ .getTransaction()
+ .createIsolationDelegate()
+ .delegateWork( work, session.getFactory().getSettings().isDataDefinitionInTransactionSupported() );
}
else {
- whereJoinFragment = whereJoinFragment.trim();
- if ( whereJoinFragment.startsWith( "and" ) ) {
- whereJoinFragment = whereJoinFragment.substring( 4 );
- }
+ final Connection connection = session.getTransactionCoordinator()
+ .getJdbcCoordinator()
+ .getLogicalConnection()
+ .getShareableConnectionProxy();
+ work.execute( connection );
+ session.getTransactionCoordinator()
+ .getJdbcCoordinator()
+ .getLogicalConnection()
+ .afterStatementExecution();
}
-
- String userWhereClause = "";
- if ( whereClause.getNumberOfChildren() != 0 ) {
- // If a where clause was specified in the update/delete query, use it to limit the
- // returned ids here...
- try {
- SqlGenerator sqlGenerator = new SqlGenerator( getFactory() );
- sqlGenerator.whereClause( whereClause );
- userWhereClause = sqlGenerator.getSQL().substring( 7 ); // strip the " where "
- idSelectParameterSpecifications = sqlGenerator.getCollectedParameters();
- }
- catch ( RecognitionException e ) {
- throw new HibernateException( "Unable to generate id select for DML operation", e );
- }
- if ( whereJoinFragment.length() > 0 ) {
- whereJoinFragment += " and ";
- }
- }
-
- select.setWhereClause( whereJoinFragment + userWhereClause );
-
- InsertSelect insert = new InsertSelect( getFactory().getDialect() );
- if ( getFactory().getSettings().isCommentsEnabled() ) {
- insert.setComment( "insert-select for " + persister.getEntityName() + " ids" );
- }
- insert.setTableName( persister.getTemporaryIdTableName() );
- insert.setSelect( select );
- return insert.toStatementString();
}
- protected String generateIdSubselect(Queryable persister) {
- return "select " + StringHelper.join( ", ", persister.getIdentifierColumnNames() ) +
- " from " + persister.getTemporaryIdTableName();
+ protected void releaseTempTable(Queryable persister, SessionImplementor session) {
+ if ( session.getFactory().getDialect().dropTemporaryTableAfterUse() ) {
+ TemporaryTableDropWork work = new TemporaryTableDropWork( persister, session );
+ if ( shouldIsolateTemporaryTableDDL( session ) ) {
+ session.getTransactionCoordinator()
+ .getTransaction()
+ .createIsolationDelegate()
+ .delegateWork( work, session.getFactory().getSettings().isDataDefinitionInTransactionSupported() );
+ }
+ else {
+ final Connection connection = session.getTransactionCoordinator()
+ .getJdbcCoordinator()
+ .getLogicalConnection()
+ .getShareableConnectionProxy();
+ work.execute( connection );
+ session.getTransactionCoordinator()
+ .getJdbcCoordinator()
+ .getLogicalConnection()
+ .afterStatementExecution();
+ }
+ }
+ else {
+ // at the very least cleanup the data :)
+ PreparedStatement ps = null;
+ try {
+ final String sql = "delete from " + persister.getTemporaryIdTableName();
+ ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false );
+ ps.executeUpdate();
+ }
+ catch( Throwable t ) {
+ log.unableToCleanupTemporaryIdTable(t);
+ }
+ finally {
+ if ( ps != null ) {
+ try {
+ ps.close();
+ }
+ catch( Throwable ignore ) {
+ // ignore
+ }
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings({ "UnnecessaryUnboxing" })
+ protected boolean shouldIsolateTemporaryTableDDL(SessionImplementor session) {
+ Boolean dialectVote = session.getFactory().getDialect().performTemporaryTableDDLInIsolation();
+ if ( dialectVote != null ) {
+ return dialectVote.booleanValue();
+ }
+ return session.getFactory().getSettings().isDataDefinitionImplicitCommit();
}
private static class TemporaryTableCreationWork extends AbstractWork {
@@ -168,46 +202,24 @@ public abstract class AbstractStatementExecutor implements StatementExecutor {
}
}
catch( Exception e ) {
- LOG.debug( "unable to create temporary id table [" + e.getMessage() + "]" );
+ log.debug( "unable to create temporary id table [" + e.getMessage() + "]" );
}
}
}
- protected void createTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) {
- // Don't really know all the codes required to adequately decipher returned jdbc exceptions here.
- // simply allow the failure to be eaten and the subsequent insert-selects/deletes should fail
- TemporaryTableCreationWork work = new TemporaryTableCreationWork( persister );
- if ( shouldIsolateTemporaryTableDDL() ) {
- session.getTransactionCoordinator()
- .getTransaction()
- .createIsolationDelegate()
- .delegateWork( work, getFactory().getSettings().isDataDefinitionInTransactionSupported() );
- }
- else {
- final Connection connection = session.getTransactionCoordinator()
- .getJdbcCoordinator()
- .getLogicalConnection()
- .getShareableConnectionProxy();
- work.execute( connection );
- session.getTransactionCoordinator()
- .getJdbcCoordinator()
- .getLogicalConnection()
- .afterStatementExecution();
- }
- }
private static SqlExceptionHelper.WarningHandler CREATION_WARNING_HANDLER = new SqlExceptionHelper.WarningHandlerLoggingSupport() {
public boolean doProcess() {
- return LOG.isDebugEnabled();
+ return log.isDebugEnabled();
}
public void prepare(SQLWarning warning) {
- LOG.warningsCreatingTempTable( warning );
+ log.warningsCreatingTempTable( warning );
}
@Override
protected void logWarning(String description, String message) {
- LOG.debug( description );
- LOG.debug( message );
+ log.debug( description );
+ log.debug( message );
}
};
@@ -240,71 +252,9 @@ public abstract class AbstractStatementExecutor implements StatementExecutor {
}
}
catch( Exception e ) {
- LOG.warn( "unable to drop temporary id table after use [" + e.getMessage() + "]" );
+ log.warn( "unable to drop temporary id table after use [" + e.getMessage() + "]" );
}
}
}
- protected void dropTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) {
- if ( getFactory().getDialect().dropTemporaryTableAfterUse() ) {
- TemporaryTableDropWork work = new TemporaryTableDropWork( persister, session );
- if ( shouldIsolateTemporaryTableDDL() ) {
- session.getTransactionCoordinator()
- .getTransaction()
- .createIsolationDelegate()
- .delegateWork( work, getFactory().getSettings().isDataDefinitionInTransactionSupported() );
- }
- else {
- final Connection connection = session.getTransactionCoordinator()
- .getJdbcCoordinator()
- .getLogicalConnection()
- .getShareableConnectionProxy();
- work.execute( connection );
- session.getTransactionCoordinator()
- .getJdbcCoordinator()
- .getLogicalConnection()
- .afterStatementExecution();
- }
- }
- else {
- // at the very least cleanup the data :)
- PreparedStatement ps = null;
- try {
- final String sql = "delete from " + persister.getTemporaryIdTableName();
- ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false );
- ps.executeUpdate();
- }
- catch( Throwable t ) {
- LOG.unableToCleanupTemporaryIdTable(t);
- }
- finally {
- if ( ps != null ) {
- try {
- ps.close();
- }
- catch( Throwable ignore ) {
- // ignore
- }
- }
- }
- }
- }
-
- protected void coordinateSharedCacheCleanup(SessionImplementor session) {
- BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getAffectedQueryables() );
-
- if ( session.isEventSource() ) {
- ( ( EventSource ) session ).getActionQueue().addAction( action );
- }
- else {
- action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
- }
- }
-
- @SuppressWarnings({ "UnnecessaryUnboxing" })
- protected boolean shouldIsolateTemporaryTableDDL() {
- Boolean dialectVote = getFactory().getDialect().performTemporaryTableDDLInIsolation();
- if (dialectVote != null) return dialectVote.booleanValue();
- return getFactory().getSettings().isDataDefinitionImplicitCommit();
- }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java b/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java
index 20029ac0fb..94890271ed 100755
--- a/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java
@@ -27,6 +27,7 @@ import java.io.Serializable;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
+import java.util.UUID;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
@@ -51,6 +52,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.transaction.spi.TransactionContext;
import org.hibernate.engine.transaction.spi.TransactionEnvironment;
+import org.hibernate.id.uuid.StandardRandomStrategy;
import org.hibernate.jdbc.WorkExecutor;
import org.hibernate.jdbc.WorkExecutorVisitable;
import org.hibernate.persister.entity.EntityPersister;
@@ -317,6 +319,15 @@ public abstract class AbstractSessionImpl implements Serializable, SharedSession
return jdbcConnectionAccess;
}
+ private UUID sessionIdentifier;
+
+ public UUID getSessionIdentifier() {
+ if ( sessionIdentifier == null ) {
+ sessionIdentifier = StandardRandomStrategy.INSTANCE.generateUUID( this );
+ }
+ return sessionIdentifier;
+ }
+
private static class NonContextualJdbcConnectionAccess implements JdbcConnectionAccess, Serializable {
private final ConnectionProvider connectionProvider;
diff --git a/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java b/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java
index de1a9f5d96..70dcaa9666 100644
--- a/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java
@@ -29,6 +29,7 @@ import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.sql.Connection;
+import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -38,12 +39,11 @@ import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+
import javax.naming.Reference;
import javax.naming.StringRefAddr;
import javax.persistence.metamodel.Metamodel;
-import org.jboss.logging.Logger;
-
import org.hibernate.AssertionFailure;
import org.hibernate.Cache;
import org.hibernate.ConnectionReleaseMode;
@@ -53,6 +53,7 @@ import org.hibernate.EntityNameResolver;
import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
import org.hibernate.MappingException;
+import org.hibernate.MultiTenancyStrategy;
import org.hibernate.ObjectNotFoundException;
import org.hibernate.QueryException;
import org.hibernate.Session;
@@ -63,6 +64,8 @@ import org.hibernate.StatelessSession;
import org.hibernate.StatelessSessionBuilder;
import org.hibernate.TypeHelper;
import org.hibernate.boot.registry.StandardServiceRegistry;
+import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
+import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.internal.CacheDataDescriptionImpl;
import org.hibernate.cache.spi.CollectionRegion;
@@ -91,8 +94,13 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.function.SQLFunction;
import org.hibernate.dialect.function.SQLFunctionRegistry;
import org.hibernate.engine.ResultSetMappingDefinition;
+import org.hibernate.engine.config.spi.ConfigurationService;
+import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
+import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
+import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
+import org.hibernate.engine.jndi.spi.JndiService;
import org.hibernate.engine.profile.Association;
import org.hibernate.engine.profile.Fetch;
import org.hibernate.engine.profile.FetchProfile;
@@ -107,6 +115,7 @@ import org.hibernate.engine.spi.SessionBuilderImplementor;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionOwner;
import org.hibernate.engine.transaction.internal.TransactionCoordinatorImpl;
+import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
import org.hibernate.engine.transaction.spi.TransactionEnvironment;
import org.hibernate.exception.spi.SQLExceptionConverter;
import org.hibernate.id.IdentifierGenerator;
@@ -131,12 +140,6 @@ import org.hibernate.persister.entity.Queryable;
import org.hibernate.persister.spi.PersisterFactory;
import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.service.ServiceRegistry;
-import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
-import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
-import org.hibernate.engine.config.spi.ConfigurationService;
-import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
-import org.hibernate.engine.jndi.spi.JndiService;
-import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;
import org.hibernate.service.spi.SessionFactoryServiceRegistryFactory;
@@ -150,6 +153,7 @@ import org.hibernate.tuple.entity.EntityTuplizer;
import org.hibernate.type.AssociationType;
import org.hibernate.type.Type;
import org.hibernate.type.TypeResolver;
+import org.jboss.logging.Logger;
/**
@@ -531,6 +535,15 @@ public final class SessionFactoryImpl
LOG.debug( "Instantiated session factory" );
+ settings.getMultiTableBulkIdStrategy().prepare(
+ jdbcServices,
+ buildLocalConnectionAccess(),
+ cfg.createMappings(),
+ cfg.buildMapping(),
+ properties
+ );
+
+
if ( settings.isAutoCreateSchema() ) {
new SchemaExport( serviceRegistry, cfg )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) )
@@ -557,7 +570,7 @@ public final class SessionFactoryImpl
String sep = "";
for ( Map.Entry entry : errors.entrySet() ) {
LOG.namedQueryError( entry.getKey(), entry.getValue() );
- failingQueries.append( entry.getKey() ).append( sep );
+ failingQueries.append( sep ).append( entry.getKey() );
sep = ", ";
}
throw new HibernateException( failingQueries.toString() );
@@ -616,6 +629,32 @@ public final class SessionFactoryImpl
}
}
+ private JdbcConnectionAccess buildLocalConnectionAccess() {
+ return new JdbcConnectionAccess() {
+ @Override
+ public Connection obtainConnection() throws SQLException {
+ return settings.getMultiTenancyStrategy() == MultiTenancyStrategy.NONE
+ ? serviceRegistry.getService( ConnectionProvider.class ).getConnection()
+ : serviceRegistry.getService( MultiTenantConnectionProvider.class ).getAnyConnection();
+ }
+
+ @Override
+ public void releaseConnection(Connection connection) throws SQLException {
+ if ( settings.getMultiTenancyStrategy() == MultiTenancyStrategy.NONE ) {
+ serviceRegistry.getService( ConnectionProvider.class ).closeConnection( connection );
+ }
+ else {
+ serviceRegistry.getService( MultiTenantConnectionProvider.class ).releaseAnyConnection( connection );
+ }
+ }
+
+ @Override
+ public boolean supportsAggressiveRelease() {
+ return false;
+ }
+ };
+ }
+
protected JpaMetaModelPopulationSetting determineJpaMetaModelPopulationSetting(Configuration cfg) {
final String setting = cfg.getProperties().getProperty( AvailableSettings.JPA_METAMODEL_POPULATION );
return JpaMetaModelPopulationSetting.parse( setting );
@@ -1420,6 +1459,8 @@ public final class SessionFactoryImpl
isClosed = true;
+ settings.getMultiTableBulkIdStrategy().release( jdbcServices, buildLocalConnectionAccess() );
+
Iterator iter = entityPersisters.values().iterator();
while ( iter.hasNext() ) {
EntityPersister p = (EntityPersister) iter.next();
diff --git a/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java b/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java
index 0fdef1d261..01d3066b1f 100644
--- a/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java
@@ -180,6 +180,8 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, SessionImpl.class.getName());
+ private static final boolean tracing = LOG.isTraceEnabled();
+
private transient long timestamp;
private transient SessionOwner sessionOwner;
@@ -309,7 +311,8 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
factory.getStatisticsImplementor().openSession();
}
- LOG.debugf( "Opened session at timestamp: %s", timestamp );
+ if (tracing)
+ LOG.tracef( "Opened session at timestamp: %s", timestamp );
}
@Override
@@ -2509,12 +2512,12 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
// synchronization (this process) was disabled
return;
}
- if ( ! isTransactionInProgress() ) {
- // not in a transaction so skip synchronization
+ if ( entityPersister.getEntityMetamodel().hasImmutableNaturalId() ) {
+ // only mutable natural-ids need this processing
return;
}
- if ( entityPersister.getEntityMetamodel().hasImmutableNaturalId() ) {
- // only mutable natural-ids need this processing
+ if ( ! isTransactionInProgress() ) {
+ // not in a transaction so skip synchronization
return;
}
@@ -2523,6 +2526,16 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
final Object entity = getPersistenceContext().getEntity( entityKey );
final EntityEntry entry = getPersistenceContext().getEntry( entity );
+ if ( entry == null ) {
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debug(
+ "Cached natural-id/pk resolution linked to null EntityEntry in persistence context : "
+ + MessageHelper.infoString( entityPersister, pk, getFactory() )
+ );
+ }
+ continue;
+ }
+
if ( !entry.requiresDirtyCheck( entity ) ) {
continue;
}
diff --git a/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java b/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java
index 10bafd49e7..19f3e64833 100644
--- a/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java
+++ b/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java
@@ -24,6 +24,7 @@
*/
package org.hibernate.internal.util;
+import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
@@ -66,6 +67,17 @@ public final class StringHelper {
return buf.toString();
}
+ public static String joinWithQualifier(String[] values, String qualifier, String deliminator) {
+ int length = values.length;
+ if ( length == 0 ) return "";
+ StringBuilder buf = new StringBuilder( length * values[0].length() )
+ .append( qualify( qualifier, values[0] ) );
+ for ( int i = 1; i < length; i++ ) {
+ buf.append( deliminator ).append( qualify( qualifier, values[i] ) );
+ }
+ return buf.toString();
+ }
+
public static String join(String seperator, Iterator objects) {
StringBuilder buf = new StringBuilder();
if ( objects.hasNext() ) buf.append( objects.next() );
@@ -89,6 +101,15 @@ public final class StringHelper {
return buf.toString();
}
+ public static String repeat(String string, int times, String deliminator) {
+ StringBuilder buf = new StringBuilder( ( string.length() * times ) + ( deliminator.length() * (times-1) ) )
+ .append( string );
+ for ( int i = 1; i < times; i++ ) {
+ buf.append( deliminator ).append( string );
+ }
+ return buf.toString();
+ }
+
public static String repeat(char character, int times) {
char[] buffer = new char[times];
Arrays.fill( buffer, character );
@@ -661,4 +682,69 @@ public final class StringHelper {
}
return unquoted;
}
+
+
+ public static final String BATCH_ID_PLACEHOLDER = "$$BATCH_ID_PLACEHOLDER$$";
+
+ public static StringBuilder buildBatchFetchRestrictionFragment(
+ String alias,
+ String[] columnNames,
+ Dialect dialect) {
+ // the general idea here is to just insert a placeholder that we can easily find later...
+ if ( columnNames.length == 1 ) {
+ // non-composite key
+ return new StringBuilder( StringHelper.qualify( alias, columnNames[0] ) )
+ .append( " in (" ).append( BATCH_ID_PLACEHOLDER ).append( ")" );
+ }
+ else {
+ // composite key - the form to use here depends on what the dialect supports.
+ if ( dialect.supportsRowValueConstructorSyntaxInInList() ) {
+ // use : (col1, col2) in ( (?,?), (?,?), ... )
+ StringBuilder builder = new StringBuilder();
+ builder.append( "(" );
+ boolean firstPass = true;
+ String deliminator = "";
+ for ( String columnName : columnNames ) {
+ builder.append( deliminator ).append( StringHelper.qualify( alias, columnName ) );
+ if ( firstPass ) {
+ firstPass = false;
+ deliminator = ",";
+ }
+ }
+ builder.append( ") in (" );
+ builder.append( BATCH_ID_PLACEHOLDER );
+ builder.append( ")" );
+ return builder;
+ }
+ else {
+ // use : ( (col1 = ? and col2 = ?) or (col1 = ? and col2 = ?) or ... )
+ // unfortunately most of this building needs to be held off until we know
+ // the exact number of ids :(
+ return new StringBuilder( "(" ).append( BATCH_ID_PLACEHOLDER ).append( ")" );
+ }
+ }
+ }
+
+ public static String expandBatchIdPlaceholder(
+ String sql,
+ Serializable[] ids,
+ String alias,
+ String[] keyColumnNames,
+ Dialect dialect) {
+ if ( keyColumnNames.length == 1 ) {
+ // non-composite
+ return StringHelper.replace( sql, BATCH_ID_PLACEHOLDER, repeat( "?", ids.length, "," ) );
+ }
+ else {
+ // composite
+ if ( dialect.supportsRowValueConstructorSyntaxInInList() ) {
+ final String tuple = "(" + StringHelper.repeat( "?", keyColumnNames.length, "," ) + ")";
+ return StringHelper.replace( sql, BATCH_ID_PLACEHOLDER, repeat( tuple, ids.length, "," ) );
+ }
+ else {
+ final String keyCheck = joinWithQualifier( keyColumnNames, alias, " = ? and " ) + " = ?";
+ return replace( sql, BATCH_ID_PLACEHOLDER, repeat( keyCheck, ids.length, " or " ) );
+ }
+ }
+ }
}
diff --git a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java
index f2eb8cb6af..f43e35b6b4 100644
--- a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java
+++ b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java
@@ -24,6 +24,7 @@
*/
package org.hibernate.internal.util.collections;
+import java.io.Serializable;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
@@ -31,6 +32,7 @@ import java.util.Collection;
import java.util.Iterator;
import java.util.List;
+import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.type.Type;
@@ -372,10 +374,43 @@ public final class ArrayHelper {
}
return true;
}
+
+ public static Serializable[] extractNonNull(Serializable[] array) {
+ final int nonNullCount = countNonNull( array );
+ final Serializable[] result = new Serializable[nonNullCount];
+ int i = 0;
+ for ( Serializable element : array ) {
+ if ( element != null ) {
+ result[i++] = element;
+ }
+ }
+ if ( i != nonNullCount ) {
+ throw new HibernateException( "Number of non-null elements varied between iterations" );
+ }
+ return result;
+ }
+
+ public static int countNonNull(Serializable[] array) {
+ int i = 0;
+ for ( Serializable element : array ) {
+ if ( element != null ) {
+ i++;
+ }
+ }
+ return i;
+ }
+
+ public static void main(String... args) {
+ int[] batchSizes = ArrayHelper.getBatchSizes( 32 );
+
+ System.out.println( "Forward ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" );
+ for ( int i = 0; i < batchSizes.length; i++ ) {
+ System.out.println( "[" + i + "] -> " + batchSizes[i] );
+ }
+
+ System.out.println( "Backward ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" );
+ for ( int i = batchSizes.length-1; i >= 0; i-- ) {
+ System.out.println( "[" + i + "] -> " + batchSizes[i] );
+ }
+ }
}
-
-
-
-
-
-
diff --git a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java
index 1d23c246f9..6839eb47c0 100644
--- a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java
+++ b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java
@@ -195,10 +195,10 @@ public final class IdentityMap implements Map {
}
public static final class IdentityMapEntry implements java.util.Map.Entry {
- private K key;
+ private final K key;
private V value;
- IdentityMapEntry(K key, V value) {
+ IdentityMapEntry(final K key, final V value) {
this.key=key;
this.value=value;
}
@@ -211,33 +211,54 @@ public final class IdentityMap implements Map {
return value;
}
- public V setValue(V value) {
+ public V setValue(final V value) {
V result = this.value;
this.value = value;
return result;
}
}
+ /**
+ * We need to base the identity on {@link System#identityHashCode(Object)} but
+ * attempt to lazily initialize and cache this value: being a native invocation
+ * it is an expensive value to retrieve.
+ */
public static final class IdentityKey implements Serializable {
- private K key;
+
+ private final K key;
+ private int hash = 0;
IdentityKey(K key) {
- this.key=key;
+ this.key = key;
}
@SuppressWarnings( {"EqualsWhichDoesntCheckParameterClass"})
@Override
- public boolean equals(Object other) {
+ public boolean equals(Object other) {
return key == ( (IdentityKey) other ).key;
}
@Override
- public int hashCode() {
- return System.identityHashCode(key);
+ public int hashCode() {
+ if ( this.hash == 0 ) {
+ //We consider "zero" as non-initialized value
+ final int newHash = System.identityHashCode( key );
+ if ( newHash == 0 ) {
+ //So make sure we don't store zeros as it would trigger initialization again:
+ //any value is fine as long as we're deterministic.
+ this.hash = -1;
+ return -1;
+ }
+ else {
+ this.hash = newHash;
+ return newHash;
+ }
+ }
+ return hash;
}
@Override
- public String toString() {
+ public String toString() {
return key.toString();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java b/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java
index 23c5a7b01a..673741e8fb 100755
--- a/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java
@@ -187,10 +187,7 @@ public abstract class AbstractEntityJoinWalker extends JoinWalker {
public abstract String getComment();
@Override
- protected boolean isDuplicateAssociation(
- final String foreignKeyTable,
- final String[] foreignKeyColumns
- ) {
+ protected boolean isDuplicateAssociation(final String foreignKeyTable, final String[] foreignKeyColumns) {
//disable a join back to this same association
final boolean isSameJoin =
persister.getTableName().equals( foreignKeyTable ) &&
@@ -201,11 +198,11 @@ public abstract class AbstractEntityJoinWalker extends JoinWalker {
- protected final Loadable getPersister() {
+ public final Loadable getPersister() {
return persister;
}
- protected final String getAlias() {
+ public final String getAlias() {
return alias;
}
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/BatchFetchStyle.java b/hibernate-core/src/main/java/org/hibernate/loader/BatchFetchStyle.java
new file mode 100644
index 0000000000..a429408cec
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/loader/BatchFetchStyle.java
@@ -0,0 +1,90 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.loader;
+
+import org.jboss.logging.Logger;
+
+/**
+ * Defines the style that should be used to perform batch loading. Which style to use is declared using
+ * the "{@value org.hibernate.cfg.AvailableSettings#BATCH_FETCH_STYLE}"
+ * ({@link org.hibernate.cfg.AvailableSettings#BATCH_FETCH_STYLE}) setting
+ *
+ * @author Steve Ebersole
+ */
+public enum BatchFetchStyle {
+ /**
+ * The legacy algorithm where we keep a set of pre-built batch sizes based on
+ * {@link org.hibernate.internal.util.collections.ArrayHelper#getBatchSizes}. Batches are performed
+ * using the next-smaller pre-built batch size from the number of existing batchable identifiers.
+ *
+ * For example, with a batch-size setting of 32 the pre-built batch sizes would be [32, 16, 10, 9, 8, 7, .., 1].
+ * An attempt to batch load 31 identifiers would result in batches of 16, 10, and 5.
+ */
+ LEGACY,
+ /**
+ * Still keeps the concept of pre-built batch sizes, but uses the next-bigger batch size and pads the extra
+ * identifier placeholders.
+ *
+ * Using the same example of a batch-size setting of 32 the pre-built batch sizes would be the same. However, the
+ * attempt to batch load 31 identifiers would result in just a single batch of size 32. The identifiers to load would
+ * be "padded" (aka, repeated) to make up the difference.
+ */
+ PADDED,
+ /**
+ * Dynamically builds its SQL based on the actual number of available ids. Does still limit to the batch-size
+ * defined on the entity/collection
+ */
+ DYNAMIC;
+
+ private static final Logger log = Logger.getLogger( BatchFetchStyle.class );
+
+ public static BatchFetchStyle byName(String name) {
+ return valueOf( name.toUpperCase() );
+ }
+
+ public static BatchFetchStyle interpret(Object setting) {
+ log.tracef( "Interpreting BatchFetchStyle from setting : %s", setting );
+
+ if ( setting == null ) {
+ return LEGACY; // as default
+ }
+
+ if ( BatchFetchStyle.class.isInstance( setting ) ) {
+ return (BatchFetchStyle) setting;
+ }
+
+ try {
+ final BatchFetchStyle byName = byName( setting.toString() );
+ if ( byName != null ) {
+ return byName;
+ }
+ }
+ catch (Exception ignore) {
+ }
+
+ log.debugf( "Unable to interpret given setting [%s] as BatchFetchStyle", setting );
+
+ return LEGACY; // again as default.
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java b/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java
index 3aaa7240fc..bf2b022679 100755
--- a/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java
@@ -539,6 +539,34 @@ public class JoinWalker {
);
}
}
+
+ // if the entity has a composite identifier, see if we need to handle
+ // its sub-properties separately
+ final Type idType = persister.getIdentifierType();
+ if ( idType.isComponentType() ) {
+ final CompositeType cidType = (CompositeType) idType;
+ if ( cidType.isEmbedded() ) {
+ // we have an embedded composite identifier. Most likely we need to process the composite
+ // properties separately, although there is an edge case where the identifier is really
+ // a simple identifier (single value) wrapped in a JPA @IdClass or even in the case of a
+ // a simple identifier (single value) wrapped in a JPA @IdClass or even in the case of
+ //
+ // We really do not have a built-in method to determine that. However, generally the
+ // persister would report that there is single, physical identifier property which is
+ // explicitly at odds with the notion of "embedded composite". So we use that for now
+ if ( persister.getEntityMetamodel().getIdentifierProperty().isEmbedded() ) {
+ walkComponentTree(
+ cidType,
+ -1,
+ 0,
+ persister,
+ alias,
+ path,
+ currentDepth
+ );
+ }
+ }
+ }
}
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/Loader.java b/hibernate-core/src/main/java/org/hibernate/loader/Loader.java
index 968d0e899b..2370a7c921 100644
--- a/hibernate-core/src/main/java/org/hibernate/loader/Loader.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/Loader.java
@@ -119,7 +119,7 @@ public abstract class Loader {
*
* @return The sql command this loader should use to get its {@link ResultSet}.
*/
- protected abstract String getSQLString();
+ public abstract String getSQLString();
/**
* An array of persisters of entity classes contained in each row of results;
@@ -256,7 +256,7 @@ public abstract class Loader {
* persister from each row of the ResultSet. If an object is supplied, will attempt to
* initialize that object. If a collection is supplied, attempt to initialize that collection.
*/
- private List doQueryAndInitializeNonLazyCollections(
+ public List doQueryAndInitializeNonLazyCollections(
final SessionImplementor session,
final QueryParameters queryParameters,
final boolean returnProxies) throws HibernateException, SQLException {
@@ -268,7 +268,7 @@ public abstract class Loader {
);
}
- private List doQueryAndInitializeNonLazyCollections(
+ public List doQueryAndInitializeNonLazyCollections(
final SessionImplementor session,
final QueryParameters queryParameters,
final boolean returnProxies,
@@ -381,12 +381,21 @@ public abstract class Loader {
hydratedObjects,
loadedKeys,
returnProxies
+ );
+ if ( ! keyToRead.equals( loadedKeys[0] ) ) {
+ throw new AssertionFailure(
+ String.format(
+ "Unexpected key read for row; expected [%s]; actual [%s]",
+ keyToRead,
+ loadedKeys[0] )
);
+ }
if ( result == null ) {
result = loaded;
}
}
- while ( keyToRead.equals( loadedKeys[0] ) && resultSet.next() );
+ while ( resultSet.next() &&
+ isCurrentRowForSameEntity( keyToRead, 0, resultSet, session ) );
}
catch ( SQLException sqle ) {
throw factory.getSQLExceptionHelper().convert(
@@ -406,6 +415,17 @@ public abstract class Loader {
return result;
}
+ private boolean isCurrentRowForSameEntity(
+ final EntityKey keyToRead,
+ final int persisterIndex,
+ final ResultSet resultSet,
+ final SessionImplementor session) throws SQLException {
+ EntityKey currentRowKey = getKeyFromResultSet(
+ persisterIndex, getEntityPersisters()[persisterIndex], null, resultSet, session
+ );
+ return keyToRead.equals( currentRowKey );
+ }
+
/**
* Loads a single logical row from the result set moving forward. This is the
* processing used from the ScrollableResults where there were collection fetches
@@ -1017,7 +1037,16 @@ public abstract class Loader {
}
}
}
-
+
+ // Until this entire method is refactored w/ polymorphism, postLoad was
+ // split off from initializeEntity. It *must* occur after
+ // endCollectionLoad to ensure the collection is in the
+ // persistence context.
+ if ( hydratedObjects!=null ) {
+ for ( Object hydratedObject : hydratedObjects ) {
+ TwoPhaseLoad.postLoad( hydratedObject, session, post );
+ }
+ }
}
private void endCollectionLoad(
@@ -1693,8 +1722,17 @@ public abstract class Loader {
final QueryParameters queryParameters,
final boolean scroll,
final SessionImplementor session) throws SQLException {
+ return executeQueryStatement( getSQLString(), queryParameters, scroll, session );
+ }
+
+ protected ResultSet executeQueryStatement(
+ final String sqlStatement,
+ final QueryParameters queryParameters,
+ final boolean scroll,
+ final SessionImplementor session) throws SQLException {
+
// Processing query filters.
- queryParameters.processFilters( getSQLString(), session );
+ queryParameters.processFilters( sqlStatement, session );
// Applying LIMIT clause.
final LimitHandler limitHandler = getLimitHandler(
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java
index 6b41233f10..9d295cbe25 100644
--- a/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java
@@ -75,7 +75,8 @@ public abstract class OuterJoinLoader extends BasicLoader {
return collectionSuffixes;
}
- protected final String getSQLString() {
+ @Override
+ public final String getSQLString() {
return sql;
}
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java b/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java
index 413e74d084..e659d2b5de 100644
--- a/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,106 +20,35 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.loader.collection;
-import java.io.Serializable;
-import org.hibernate.HibernateException;
-import org.hibernate.MappingException;
-import org.hibernate.engine.spi.LoadQueryInfluencers;
-import org.hibernate.engine.spi.SessionFactoryImplementor;
-import org.hibernate.engine.spi.SessionImplementor;
-import org.hibernate.internal.util.collections.ArrayHelper;
-import org.hibernate.loader.Loader;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.collection.QueryableCollection;
/**
- * "Batch" loads collections, using multiple foreign key values in the
- * SQL where clause.
+ * The base contract for loaders capable of performing batch-fetch loading of collections using multiple foreign key
+ * values in the SQL WHERE clause.
*
+ * @author Gavin King
+ * @author Steve Ebersole
+ *
+ * @see BatchingCollectionInitializerBuilder
* @see BasicCollectionLoader
* @see OneToManyLoader
- * @author Gavin King
*/
-public class BatchingCollectionInitializer implements CollectionInitializer {
- private final Loader[] loaders;
- private final int[] batchSizes;
- private final CollectionPersister collectionPersister;
+public abstract class BatchingCollectionInitializer implements CollectionInitializer {
+ private final QueryableCollection collectionPersister;
- public BatchingCollectionInitializer(CollectionPersister collPersister, int[] batchSizes, Loader[] loaders) {
- this.loaders = loaders;
- this.batchSizes = batchSizes;
- this.collectionPersister = collPersister;
+ public BatchingCollectionInitializer(QueryableCollection collectionPersister) {
+ this.collectionPersister = collectionPersister;
}
public CollectionPersister getCollectionPersister() {
return collectionPersister;
}
- public Loader[] getLoaders() {
- return loaders;
+ public QueryableCollection collectionPersister() {
+ return collectionPersister;
}
-
- public int[] getBatchSizes() {
- return batchSizes;
- }
-
- public void initialize(Serializable id, SessionImplementor session)
- throws HibernateException {
-
- Serializable[] batch = session.getPersistenceContext().getBatchFetchQueue()
- .getCollectionBatch( collectionPersister, id, batchSizes[0] );
-
- for ( int i=0; i 1 ) {
- int[] batchSizesToCreate = ArrayHelper.getBatchSizes(maxBatchSize);
- Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ];
- for ( int i=0; i 1 ) {
- int[] batchSizesToCreate = ArrayHelper.getBatchSizes( maxBatchSize );
- Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ];
- for ( int i=0; i= numberOfIds ) {
+ indexToUse = i;
+ }
+ else {
+ break;
+ }
+ }
+
+ final Serializable[] idsToLoad = new Serializable[ batchSizes[indexToUse] ];
+ System.arraycopy( batch, 0, idsToLoad, 0, numberOfIds );
+ for ( int i = numberOfIds; i < batchSizes[indexToUse]; i++ ) {
+ idsToLoad[i] = id;
+ }
+
+ loaders[indexToUse].loadCollectionBatch( session, idsToLoad, collectionPersister().getKeyType() );
+ }
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java
index ff48d3ec19..8eaca13cb2 100755
--- a/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java
@@ -277,7 +277,7 @@ public class CustomLoader extends Loader {
}
@Override
- protected String getSQLString() {
+ public String getSQLString() {
return sql;
}
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java
index ef7469aa8f..bde9a8c24a 100644
--- a/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,135 +20,109 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.loader.entity;
import java.io.Serializable;
-import java.util.Iterator;
+import java.sql.SQLException;
+import java.util.Arrays;
import java.util.List;
-import org.hibernate.LockMode;
+import org.jboss.logging.Logger;
+
import org.hibernate.LockOptions;
-import org.hibernate.MappingException;
-import org.hibernate.engine.spi.LoadQueryInfluencers;
-import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionImplementor;
-import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.loader.Loader;
import org.hibernate.persister.entity.EntityPersister;
-import org.hibernate.persister.entity.OuterJoinLoadable;
+import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.Type;
/**
- * "Batch" loads entities, using multiple primary key values in the
- * SQL where clause.
+ * The base contract for loaders capable of performing batch-fetch loading of entities using multiple primary key
+ * values in the SQL WHERE clause.
*
- * @see EntityLoader
* @author Gavin King
+ * @author Steve Ebersole
+ *
+ * @see BatchingEntityLoaderBuilder
+ * @see UniqueEntityLoader
*/
-public class BatchingEntityLoader implements UniqueEntityLoader {
+public abstract class BatchingEntityLoader implements UniqueEntityLoader {
+ private static final Logger log = Logger.getLogger( BatchingEntityLoader.class );
- private final Loader[] loaders;
- private final int[] batchSizes;
private final EntityPersister persister;
- private final Type idType;
- public BatchingEntityLoader(EntityPersister persister, int[] batchSizes, Loader[] loaders) {
- this.batchSizes = batchSizes;
- this.loaders = loaders;
+ public BatchingEntityLoader(EntityPersister persister) {
this.persister = persister;
- idType = persister.getIdentifierType();
}
- private Object getObjectFromList(List results, Serializable id, SessionImplementor session) {
- // get the right object from the list ... would it be easier to just call getEntity() ??
- Iterator iter = results.iterator();
- while ( iter.hasNext() ) {
- Object obj = iter.next();
- final boolean equal = idType.isEqual(
+ public EntityPersister persister() {
+ return persister;
+ }
+
+ @Override
+ @Deprecated
+ public Object load(Serializable id, Object optionalObject, SessionImplementor session) {
+ return load( id, optionalObject, session, LockOptions.NONE );
+ }
+
+ protected QueryParameters buildQueryParameters(
+ Serializable id,
+ Serializable[] ids,
+ Object optionalObject,
+ LockOptions lockOptions) {
+ Type[] types = new Type[ids.length];
+ Arrays.fill( types, persister().getIdentifierType() );
+
+ QueryParameters qp = new QueryParameters();
+ qp.setPositionalParameterTypes( types );
+ qp.setPositionalParameterValues( ids );
+ qp.setOptionalObject( optionalObject );
+ qp.setOptionalEntityName( persister().getEntityName() );
+ qp.setOptionalId( id );
+ qp.setLockOptions( lockOptions );
+ return qp;
+ }
+
+ protected Object getObjectFromList(List results, Serializable id, SessionImplementor session) {
+ for ( Object obj : results ) {
+ final boolean equal = persister.getIdentifierType().isEqual(
id,
- session.getContextEntityIdentifier(obj),
+ session.getContextEntityIdentifier( obj ),
session.getFactory()
);
- if ( equal ) return obj;
+ if ( equal ) {
+ return obj;
+ }
}
return null;
}
- /**
- * {@inheritDoc}
- */
- public Object load(Serializable id, Object optionalObject, SessionImplementor session) {
- // this form is deprecated!
- return load( id, optionalObject, session, LockOptions.NONE );
- }
-
- public Object load(Serializable id, Object optionalObject, SessionImplementor session, LockOptions lockOptions) {
- Serializable[] batch = session.getPersistenceContext()
- .getBatchFetchQueue()
- .getEntityBatch( persister, id, batchSizes[0], persister.getEntityMode() );
-
- for ( int i=0; i1 ) {
- int[] batchSizesToCreate = ArrayHelper.getBatchSizes(maxBatchSize);
- Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ];
- for ( int i=0; i1 ) {
- int[] batchSizesToCreate = ArrayHelper.getBatchSizes(maxBatchSize);
- Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ];
- for ( int i=0; i= numberOfIds ) {
+ indexToUse = i;
+ }
+ else {
+ break;
+ }
+ }
+
+ final Serializable[] idsToLoad = new Serializable[ batchSizes[indexToUse] ];
+ System.arraycopy( batch, 0, idsToLoad, 0, numberOfIds );
+ for ( int i = numberOfIds; i < batchSizes[indexToUse]; i++ ) {
+ idsToLoad[i] = id;
+ }
+
+ return doBatchLoad( id, loaders[indexToUse], session, idsToLoad, optionalObject, lockOptions );
+ }
+ }
+
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java
index 4e70150733..f3e7982ffd 100644
--- a/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java
@@ -43,6 +43,7 @@ public interface UniqueEntityLoader {
* @deprecated use {@link #load(java.io.Serializable, Object, SessionImplementor, LockOptions)} instead.
*/
@SuppressWarnings( {"JavaDoc"})
+ @Deprecated
public Object load(Serializable id, Object optionalObject, SessionImplementor session) throws HibernateException;
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java
index 7e845d9b04..18ee1d5f95 100644
--- a/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java
+++ b/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java
@@ -239,7 +239,7 @@ public class QueryLoader extends BasicLoader {
/**
* The SQL query string to be called.
*/
- protected String getSQLString() {
+ public String getSQLString() {
return queryTranslator.getSQLString();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/Column.java b/hibernate-core/src/main/java/org/hibernate/mapping/Column.java
index 44dfc49d54..569b08e30d 100644
--- a/hibernate-core/src/main/java/org/hibernate/mapping/Column.java
+++ b/hibernate-core/src/main/java/org/hibernate/mapping/Column.java
@@ -343,7 +343,8 @@ public class Column implements Selectable, Serializable, Cloneable {
/**
* Shallow copy, the value is not copied
*/
- protected Object clone() {
+ @Override
+ public Column clone() {
Column copy = new Column();
copy.setLength( length );
copy.setScale( scale );
diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java b/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java
index 851f53e724..e8a299dd48 100644
--- a/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java
+++ b/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java
@@ -768,14 +768,14 @@ public abstract class PersistentClass implements Serializable, Filterable, MetaA
}
public void prepareTemporaryTables(Mapping mapping, Dialect dialect) {
+ temporaryIdTableName = dialect.generateTemporaryTableName( getTable().getName() );
if ( dialect.supportsTemporaryTables() ) {
- temporaryIdTableName = dialect.generateTemporaryTableName( getTable().getName() );
Table table = new Table();
table.setName( temporaryIdTableName );
Iterator itr = getTable().getPrimaryKey().getColumnIterator();
while( itr.hasNext() ) {
Column column = (Column) itr.next();
- table.addColumn( (Column) column.clone() );
+ table.addColumn( column.clone() );
}
temporaryIdTableDDL = table.sqlTemporaryTableCreateString( dialect, mapping );
}
diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/Property.java b/hibernate-core/src/main/java/org/hibernate/mapping/Property.java
index 6cb91d0f4e..5e0f9c5317 100644
--- a/hibernate-core/src/main/java/org/hibernate/mapping/Property.java
+++ b/hibernate-core/src/main/java/org/hibernate/mapping/Property.java
@@ -62,6 +62,7 @@ public class Property implements Serializable, MetaAttributable {
private java.util.Map metaAttributes;
private PersistentClass persistentClass;
private boolean naturalIdentifier;
+ private boolean lob;
public boolean isBackRef() {
return false;
@@ -343,4 +344,12 @@ public class Property implements Serializable, MetaAttributable {
this.naturalIdentifier = naturalIdentifier;
}
+ public boolean isLob() {
+ return lob;
+ }
+
+ public void setLob(boolean lob) {
+ this.lob = lob;
+ }
+
}
diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java b/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java
index 99f3b003dd..488fe357c7 100644
--- a/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java
+++ b/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java
@@ -24,7 +24,6 @@
package org.hibernate.mapping;
import java.lang.annotation.Annotation;
-import java.lang.reflect.Field;
import java.lang.reflect.TypeVariable;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
@@ -34,13 +33,12 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
-import javax.persistence.AttributeConverter;
-import org.jboss.logging.Logger;
+import javax.persistence.AttributeConverter;
import org.hibernate.FetchMode;
import org.hibernate.MappingException;
-import org.hibernate.cfg.AccessType;
+import org.hibernate.annotations.common.reflection.XProperty;
import org.hibernate.cfg.AttributeConverterDefinition;
import org.hibernate.cfg.Environment;
import org.hibernate.cfg.Mappings;
@@ -51,7 +49,6 @@ import org.hibernate.id.IdentityGenerator;
import org.hibernate.id.PersistentIdentifierGenerator;
import org.hibernate.id.factory.IdentifierGeneratorFactory;
import org.hibernate.internal.util.ReflectHelper;
-import org.hibernate.property.DirectPropertyAccessor;
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.ValueBinder;
@@ -65,6 +62,7 @@ import org.hibernate.type.descriptor.sql.JdbcTypeJavaClassMappings;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptorRegistry;
import org.hibernate.usertype.DynamicParameterizedType;
+import org.jboss.logging.Logger;
/**
* Any value that maps to columns.
@@ -527,29 +525,26 @@ public class SimpleValue implements KeyValue {
columnsNames[i] = ( (Column) columns.get( i ) ).getName();
}
- AccessType accessType = AccessType.getAccessStrategy( typeParameters
- .getProperty( DynamicParameterizedType.ACCESS_TYPE ) );
- final Class classEntity = ReflectHelper.classForName( typeParameters
- .getProperty( DynamicParameterizedType.ENTITY ) );
- final String propertyName = typeParameters.getProperty( DynamicParameterizedType.PROPERTY );
-
- Annotation[] annotations;
- if ( accessType == AccessType.FIELD ) {
- annotations = ( (Field) new DirectPropertyAccessor().getGetter( classEntity, propertyName ).getMember() )
- .getAnnotations();
-
- }
- else {
- annotations = ReflectHelper.getGetter( classEntity, propertyName ).getMethod().getAnnotations();
- }
+ final XProperty xProperty = (XProperty) typeParameters.get( DynamicParameterizedType.XPROPERTY );
+ // todo : not sure this works for handling @MapKeyEnumerated
+ final Annotation[] annotations = xProperty == null
+ ? null
+ : xProperty.getAnnotations();
typeParameters.put(
DynamicParameterizedType.PARAMETER_TYPE,
- new ParameterTypeImpl( ReflectHelper.classForName( typeParameters
- .getProperty( DynamicParameterizedType.RETURNED_CLASS ) ), annotations, table.getCatalog(),
- table.getSchema(), table.getName(), Boolean.valueOf( typeParameters
- .getProperty( DynamicParameterizedType.IS_PRIMARY_KEY ) ), columnsNames ) );
-
+ new ParameterTypeImpl(
+ ReflectHelper.classForName(
+ typeParameters.getProperty( DynamicParameterizedType.RETURNED_CLASS )
+ ),
+ annotations,
+ table.getCatalog(),
+ table.getSchema(),
+ table.getName(),
+ Boolean.valueOf( typeParameters.getProperty( DynamicParameterizedType.IS_PRIMARY_KEY ) ),
+ columnsNames
+ )
+ );
}
catch ( ClassNotFoundException cnfe ) {
throw new MappingException( "Could not create DynamicParameterizedType for type: " + typeName, cnfe );
@@ -612,4 +607,4 @@ public class SimpleValue implements KeyValue {
return columns;
}
}
-}
\ No newline at end of file
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/Table.java b/hibernate-core/src/main/java/org/hibernate/mapping/Table.java
index 4c2e96903d..3b34475107 100644
--- a/hibernate-core/src/main/java/org/hibernate/mapping/Table.java
+++ b/hibernate-core/src/main/java/org/hibernate/mapping/Table.java
@@ -328,6 +328,36 @@ public class Table implements RelationalModel, Serializable {
&& uniqueKey.getColumns().containsAll( primaryKey.getColumns() );
}
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result
+ + ((catalog == null) ? 0 : isCatalogQuoted() ? catalog.hashCode() : catalog.toLowerCase().hashCode());
+ result = prime * result + ((name == null) ? 0 : isQuoted() ? name.hashCode() : name.toLowerCase().hashCode());
+ result = prime * result
+ + ((schema == null) ? 0 : isSchemaQuoted() ? schema.hashCode() : schema.toLowerCase().hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object object) {
+ return object instanceof Table && equals((Table) object);
+ }
+
+ public boolean equals(Table table) {
+ if (null == table) {
+ return false;
+ }
+ if (this == table) {
+ return true;
+ }
+
+ return isQuoted() ? name.equals(table.getName()) : name.equalsIgnoreCase(table.getName())
+ && ((schema == null && table.getSchema() != null) ? false : (schema == null) ? true : isSchemaQuoted() ? schema.equals(table.getSchema()) : schema.equalsIgnoreCase(table.getSchema()))
+ && ((catalog == null && table.getCatalog() != null) ? false : (catalog == null) ? true : isCatalogQuoted() ? catalog.equals(table.getCatalog()) : catalog.equalsIgnoreCase(table.getCatalog()));
+ }
+
public void validateColumns(Dialect dialect, Mapping mapping, TableMetadata tableInfo) {
Iterator iter = getColumnIterator();
while ( iter.hasNext() ) {
@@ -394,7 +424,7 @@ public class Table implements RelationalModel, Serializable {
boolean useUniqueConstraint = column.isUnique() &&
dialect.supportsUnique() &&
- ( !column.isNullable() || dialect.supportsNotNullUnique() );
+ ( column.isNullable() || dialect.supportsNotNullUnique() );
if ( useUniqueConstraint ) {
alter.append( " unique" );
}
@@ -495,7 +525,7 @@ public class Table implements RelationalModel, Serializable {
}
boolean useUniqueConstraint = col.isUnique() &&
- ( !col.isNullable() || dialect.supportsNotNullUnique() );
+ ( col.isNullable() || dialect.supportsNotNullUnique() );
if ( useUniqueConstraint ) {
if ( dialect.supportsUnique() ) {
buf.append( " unique" );
diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java
index b64a7633bb..0094b30460 100644
--- a/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java
+++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java
@@ -1569,7 +1569,7 @@ public abstract class AbstractCollectionPersister
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Inserting collection: %s",
- MessageHelper.collectionInfoString( this, id, getFactory() ) );
+ MessageHelper.collectionInfoString( this, collection, id, session ) );
}
try {
@@ -1662,7 +1662,7 @@ public abstract class AbstractCollectionPersister
throw sqlExceptionHelper.convert(
sqle,
"could not insert collection: " +
- MessageHelper.collectionInfoString( this, id, getFactory() ),
+ MessageHelper.collectionInfoString( this, collection, id, session ),
getSQLInsertRowString()
);
}
@@ -1682,7 +1682,7 @@ public abstract class AbstractCollectionPersister
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Deleting rows of collection: %s",
- MessageHelper.collectionInfoString( this, id, getFactory() ) );
+ MessageHelper.collectionInfoString( this, collection, id, session ) );
}
boolean deleteByIndex = !isOneToMany() && hasIndex && !indexContainsFormula;
@@ -1770,7 +1770,7 @@ public abstract class AbstractCollectionPersister
throw sqlExceptionHelper.convert(
sqle,
"could not delete collection rows: " +
- MessageHelper.collectionInfoString( this, id, getFactory() ),
+ MessageHelper.collectionInfoString( this, collection, id, session ),
getSQLDeleteRowString()
);
}
@@ -1789,7 +1789,7 @@ public abstract class AbstractCollectionPersister
if ( !isInverse && isRowInsertEnabled() ) {
if ( LOG.isDebugEnabled() ) LOG.debugf( "Inserting rows of collection: %s",
- MessageHelper.collectionInfoString( this, id, getFactory() ) );
+ MessageHelper.collectionInfoString( this, collection, id, session ) );
try {
// insert all the new entries
@@ -1869,7 +1869,7 @@ public abstract class AbstractCollectionPersister
throw sqlExceptionHelper.convert(
sqle,
"could not insert collection rows: " +
- MessageHelper.collectionInfoString( this, id, getFactory() ),
+ MessageHelper.collectionInfoString( this, collection, id, session ),
getSQLInsertRowString()
);
}
@@ -2267,6 +2267,10 @@ public abstract class AbstractCollectionPersister
return initializer;
}
+ public int getBatchSize() {
+ return batchSize;
+ }
+
private class StandardOrderByAliasResolver implements OrderByAliasResolver {
private final String rootAlias;
@@ -2287,4 +2291,5 @@ public abstract class AbstractCollectionPersister
}
public abstract FilterAliasGenerator getFilterAliasGenerator(final String rootAlias);
+
}
diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java
index 3dfafd9b73..9af95ba2c2 100644
--- a/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java
+++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java
@@ -45,6 +45,7 @@ import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.jdbc.Expectation;
import org.hibernate.jdbc.Expectations;
import org.hibernate.loader.collection.BatchingCollectionInitializer;
+import org.hibernate.loader.collection.BatchingCollectionInitializerBuilder;
import org.hibernate.loader.collection.CollectionInitializer;
import org.hibernate.loader.collection.SubselectCollectionLoader;
import org.hibernate.mapping.Collection;
@@ -294,7 +295,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
catch ( SQLException sqle ) {
throw getSQLExceptionHelper().convert(
sqle,
- "could not update collection rows: " + MessageHelper.collectionInfoString( this, id, getFactory() ),
+ "could not update collection rows: " + MessageHelper.collectionInfoString( this, collection, id, session ),
getSQLUpdateRowString()
);
}
@@ -342,7 +343,8 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
@Override
protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers)
throws MappingException {
- return BatchingCollectionInitializer.createBatchingCollectionInitializer( this, batchSize, getFactory(), loadQueryInfluencers );
+ return BatchingCollectionInitializerBuilder.getBuilder( getFactory() )
+ .createBatchingCollectionInitializer( this, batchSize, getFactory(), loadQueryInfluencers );
}
@Override
public String fromJoinFragment(String alias, boolean innerJoin, boolean includeSubclasses) {
diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java
index 4abbd97d50..81900d648e 100644
--- a/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java
+++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java
@@ -299,4 +299,5 @@ public interface CollectionPersister {
public boolean indexExists(Serializable key, Object index, SessionImplementor session);
public boolean elementExists(Serializable key, Object element, SessionImplementor session);
public Object getElementByIndex(Serializable key, Object index, SessionImplementor session, Object owner);
+ public int getBatchSize();
}
diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java
index fa010c168a..88e9618a48 100644
--- a/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java
+++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java
@@ -44,6 +44,7 @@ import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.jdbc.Expectation;
import org.hibernate.jdbc.Expectations;
import org.hibernate.loader.collection.BatchingCollectionInitializer;
+import org.hibernate.loader.collection.BatchingCollectionInitializerBuilder;
import org.hibernate.loader.collection.CollectionInitializer;
import org.hibernate.loader.collection.SubselectOneToManyLoader;
import org.hibernate.loader.entity.CollectionElementLoader;
@@ -352,7 +353,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
throw getFactory().getSQLExceptionHelper().convert(
sqle,
"could not update collection rows: " +
- MessageHelper.collectionInfoString( this, id, getFactory() ),
+ MessageHelper.collectionInfoString( this, collection, id, session ),
getSQLInsertRowString()
);
}
@@ -384,7 +385,8 @@ public class OneToManyPersister extends AbstractCollectionPersister {
@Override
protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers)
throws MappingException {
- return BatchingCollectionInitializer.createBatchingOneToManyInitializer( this, batchSize, getFactory(), loadQueryInfluencers );
+ return BatchingCollectionInitializerBuilder.getBuilder( getFactory() )
+ .createBatchingOneToManyInitializer( this, batchSize, getFactory(), loadQueryInfluencers );
}
public String fromJoinFragment(String alias,
diff --git a/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java
index de6fee5548..a96f091e8f 100644
--- a/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java
+++ b/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java
@@ -92,6 +92,7 @@ import org.hibernate.jdbc.Expectation;
import org.hibernate.jdbc.Expectations;
import org.hibernate.jdbc.TooManyRowsAffectedException;
import org.hibernate.loader.entity.BatchingEntityLoader;
+import org.hibernate.loader.entity.BatchingEntityLoaderBuilder;
import org.hibernate.loader.entity.CascadeEntityLoader;
import org.hibernate.loader.entity.EntityLoader;
import org.hibernate.loader.entity.UniqueEntityLoader;
@@ -192,6 +193,8 @@ public abstract class AbstractEntityPersister
private final boolean[][] propertyColumnInsertable;
private final boolean[] propertyUniqueness;
private final boolean[] propertySelectable;
+
+ private final List lobProperties = new ArrayList();
//information about lazy properties of this class
private final String[] lazyPropertyNames;
@@ -638,6 +641,10 @@ public abstract class AbstractEntityPersister
propertySelectable[i] = prop.isSelectable();
propertyUniqueness[i] = prop.getValue().isAlternateUniqueKey();
+
+ if (prop.isLob() && getFactory().getDialect().forceLobAsLastValue() ) {
+ lobProperties.add( i );
+ }
i++;
@@ -2521,26 +2528,16 @@ public abstract class AbstractEntityPersister
LockMode lockMode,
LoadQueryInfluencers loadQueryInfluencers) throws MappingException {
//TODO: disable batch loading if lockMode > READ?
- return BatchingEntityLoader.createBatchingEntityLoader(
- this,
- batchSize,
- lockMode,
- getFactory(),
- loadQueryInfluencers
- );
+ return BatchingEntityLoaderBuilder.getBuilder( getFactory() )
+ .buildLoader( this, batchSize, lockMode, getFactory(), loadQueryInfluencers );
}
protected UniqueEntityLoader createEntityLoader(
LockOptions lockOptions,
LoadQueryInfluencers loadQueryInfluencers) throws MappingException {
//TODO: disable batch loading if lockMode > READ?
- return BatchingEntityLoader.createBatchingEntityLoader(
- this,
- batchSize,
- lockOptions,
- getFactory(),
- loadQueryInfluencers
- );
+ return BatchingEntityLoaderBuilder.getBuilder( getFactory() )
+ .buildLoader( this, batchSize, lockOptions, getFactory(), loadQueryInfluencers );
}
protected UniqueEntityLoader createEntityLoader(LockMode lockMode) throws MappingException {
@@ -2597,12 +2594,26 @@ public abstract class AbstractEntityPersister
boolean hasColumns = false;
for ( int i = 0; i < entityMetamodel.getPropertySpan(); i++ ) {
- if ( includeProperty[i] && isPropertyOfTable( i, j ) ) {
+ if ( includeProperty[i] && isPropertyOfTable( i, j )
+ && !lobProperties.contains( i ) ) {
// this is a property of the table, which we are updating
- update.addColumns( getPropertyColumnNames(i), propertyColumnUpdateable[i], propertyColumnWriters[i] );
+ update.addColumns( getPropertyColumnNames(i),
+ propertyColumnUpdateable[i], propertyColumnWriters[i] );
hasColumns = hasColumns || getPropertyColumnSpan( i ) > 0;
}
}
+
+ // HHH-4635
+ // Oracle expects all Lob properties to be last in inserts
+ // and updates. Insert them at the end.
+ for ( int i : lobProperties ) {
+ if ( includeProperty[i] && isPropertyOfTable( i, j ) ) {
+ // this property belongs on the table and is to be updated
+ update.addColumns( getPropertyColumnNames(i),
+ propertyColumnUpdateable[i], propertyColumnWriters[i] );
+ hasColumns = true;
+ }
+ }
if ( j == 0 && isVersioned() && entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.VERSION ) {
// this is the root (versioned) table, and we are using version-based
@@ -2668,7 +2679,8 @@ public abstract class AbstractEntityPersister
/**
* Generate the SQL that inserts a row
*/
- protected String generateInsertString(boolean identityInsert, boolean[] includeProperty, int j) {
+ protected String generateInsertString(boolean identityInsert,
+ boolean[] includeProperty, int j) {
// todo : remove the identityInsert param and variations;
// identity-insert strings are now generated from generateIdentityInsertString()
@@ -2678,9 +2690,13 @@ public abstract class AbstractEntityPersister
// add normal properties
for ( int i = 0; i < entityMetamodel.getPropertySpan(); i++ ) {
- if ( includeProperty[i] && isPropertyOfTable( i, j ) ) {
+
+ if ( includeProperty[i] && isPropertyOfTable( i, j )
+ && !lobProperties.contains( i ) ) {
// this property belongs on the table and is to be inserted
- insert.addColumns( getPropertyColumnNames(i), propertyColumnInsertable[i], propertyColumnWriters[i] );
+ insert.addColumns( getPropertyColumnNames(i),
+ propertyColumnInsertable[i],
+ propertyColumnWriters[i] );
}
}
@@ -2700,6 +2716,18 @@ public abstract class AbstractEntityPersister
if ( getFactory().getSettings().isCommentsEnabled() ) {
insert.setComment( "insert " + getEntityName() );
}
+
+ // HHH-4635
+ // Oracle expects all Lob properties to be last in inserts
+ // and updates. Insert them at the end.
+ for ( int i : lobProperties ) {
+ if ( includeProperty[i] && isPropertyOfTable( i, j ) ) {
+ // this property belongs on the table and is to be inserted
+ insert.addColumns( getPropertyColumnNames(i),
+ propertyColumnInsertable[i],
+ propertyColumnWriters[i] );
+ }
+ }
String result = insert.toStatementString();
@@ -2767,8 +2795,9 @@ public abstract class AbstractEntityPersister
boolean[][] includeColumns,
int j,
PreparedStatement st,
- SessionImplementor session) throws HibernateException, SQLException {
- return dehydrate( id, fields, null, includeProperty, includeColumns, j, st, session, 1 );
+ SessionImplementor session,
+ boolean isUpdate) throws HibernateException, SQLException {
+ return dehydrate( id, fields, null, includeProperty, includeColumns, j, st, session, 1, isUpdate );
}
/**
@@ -2783,32 +2812,58 @@ public abstract class AbstractEntityPersister
final int j,
final PreparedStatement ps,
final SessionImplementor session,
- int index) throws SQLException, HibernateException {
+ int index,
+ boolean isUpdate ) throws SQLException, HibernateException {
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "Dehydrating entity: {0}", MessageHelper.infoString( this, id, getFactory() ) );
}
for ( int i = 0; i < entityMetamodel.getPropertySpan(); i++ ) {
- if ( includeProperty[i] && isPropertyOfTable( i, j ) ) {
+ if ( includeProperty[i] && isPropertyOfTable( i, j )
+ && !lobProperties.contains( i )) {
getPropertyTypes()[i].nullSafeSet( ps, fields[i], index, includeColumns[i], session );
- //index += getPropertyColumnSpan( i );
index += ArrayHelper.countTrue( includeColumns[i] ); //TODO: this is kinda slow...
}
}
-
- if ( rowId != null ) {
- ps.setObject( index, rowId );
- index += 1;
+
+ if ( !isUpdate ) {
+ index += dehydrateId( id, rowId, ps, session, index );
}
- else if ( id != null ) {
- getIdentifierType().nullSafeSet( ps, id, index, session );
- index += getIdentifierColumnSpan();
+
+ // HHH-4635
+ // Oracle expects all Lob properties to be last in inserts
+ // and updates. Insert them at the end.
+ for ( int i : lobProperties ) {
+ if ( includeProperty[i] && isPropertyOfTable( i, j ) ) {
+ getPropertyTypes()[i].nullSafeSet( ps, fields[i], index, includeColumns[i], session );
+ index += ArrayHelper.countTrue( includeColumns[i] ); //TODO: this is kinda slow...
+ }
+ }
+
+ if ( isUpdate ) {
+ index += dehydrateId( id, rowId, ps, session, index );
}
return index;
}
+
+ private int dehydrateId(
+ final Serializable id,
+ final Object rowId,
+ final PreparedStatement ps,
+ final SessionImplementor session,
+ int index ) throws SQLException {
+ if ( rowId != null ) {
+ ps.setObject( index, rowId );
+ return 1;
+ } else if ( id != null ) {
+ getIdentifierType().nullSafeSet( ps, id, index, session );
+ return getIdentifierColumnSpan();
+ }
+ return 0;
+ }
/**
* Unmarshall the fields of a persistent instance from a result set,
@@ -2949,7 +3004,7 @@ public abstract class AbstractEntityPersister
Binder binder = new Binder() {
public void bindValues(PreparedStatement ps) throws SQLException {
- dehydrate( null, fields, notNull, propertyColumnInsertable, 0, ps, session );
+ dehydrate( null, fields, notNull, propertyColumnInsertable, 0, ps, session, false );
}
public Object getEntity() {
return object;
@@ -3046,7 +3101,7 @@ public abstract class AbstractEntityPersister
// Write the values of fields onto the prepared statement - we MUST use the state at the time the
// insert was issued (cos of foreign key constraints). Not necessarily the object's current state
- dehydrate( id, fields, null, notNull, propertyColumnInsertable, j, insert, session, index );
+ dehydrate( id, fields, null, notNull, propertyColumnInsertable, j, insert, session, index, false );
if ( useBatch ) {
session.getTransactionCoordinator().getJdbcCoordinator().getBatch( inserBatchKey ).addToBatch();
@@ -3174,7 +3229,7 @@ public abstract class AbstractEntityPersister
index+= expectation.prepare( update );
//Now write the values of fields onto the prepared statement
- index = dehydrate( id, fields, rowId, includeProperty, propertyColumnUpdateable, j, update, session, index );
+ index = dehydrate( id, fields, rowId, includeProperty, propertyColumnUpdateable, j, update, session, index, true );
// Write any appropriate versioning conditional parameters
if ( useVersion && entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.VERSION ) {
diff --git a/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java b/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java
index 7da348880d..f519ecdc41 100644
--- a/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java
+++ b/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java
@@ -25,7 +25,9 @@
package org.hibernate.pretty;
import java.io.Serializable;
+import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.type.Type;
@@ -234,7 +236,52 @@ public final class MessageHelper {
// collections ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ /**
+ * Generate an info message string relating to a particular managed
+ * collection. Attempts to intelligently handle property-refs issues
+ * where the collection key is not the same as the owner key.
+ *
+ * @param persister The persister for the collection
+ * @param collection The collection itself
+ * @param collectionKey The collection key
+ * @param session The session
+ * @return An info string, in the form [Foo.bars#1]
+ */
+ public static String collectionInfoString(
+ CollectionPersister persister,
+ PersistentCollection collection,
+ Serializable collectionKey,
+ SessionImplementor session ) {
+
+ StringBuilder s = new StringBuilder();
+ s.append( '[' );
+ if ( persister == null ) {
+ s.append( "" );
+ }
+ else {
+ s.append( persister.getRole() );
+ s.append( '#' );
+
+ Type ownerIdentifierType = persister.getOwnerEntityPersister()
+ .getIdentifierType();
+ Serializable ownerKey;
+ // TODO: Is it redundant to attempt to use the collectionKey,
+ // or is always using the owner id sufficient?
+ if ( collectionKey.getClass().isAssignableFrom(
+ ownerIdentifierType.getReturnedClass() ) ) {
+ ownerKey = collectionKey;
+ } else {
+ ownerKey = session.getPersistenceContext()
+ .getEntry( collection.getOwner() ).getId();
+ }
+ s.append( ownerIdentifierType.toLoggableString(
+ ownerKey, session.getFactory() ) );
+ }
+ s.append( ']' );
+ return s.toString();
+ }
/**
* Generate an info message string relating to a series of managed
@@ -258,11 +305,7 @@ public final class MessageHelper {
s.append( persister.getRole() );
s.append( "#<" );
for ( int i = 0; i < ids.length; i++ ) {
- // Need to use the identifier type of the collection owner
- // since the incoming is value is actually the owner's id.
- // Using the collection's key type causes problems with
- // property-ref keys...
- s.append( persister.getOwnerEntityPersister().getIdentifierType().toLoggableString( ids[i], factory ) );
+ addIdToCollectionInfoString( persister, ids[i], factory, s );
if ( i < ids.length-1 ) {
s.append( ", " );
}
@@ -299,17 +342,37 @@ public final class MessageHelper {
s.append( "" );
}
else {
- // Need to use the identifier type of the collection owner
- // since the incoming is value is actually the owner's id.
- // Using the collection's key type causes problems with
- // property-ref keys...
- s.append( persister.getOwnerEntityPersister().getIdentifierType().toLoggableString( id, factory ) );
+ addIdToCollectionInfoString( persister, id, factory, s );
}
}
s.append( ']' );
return s.toString();
}
+
+ private static void addIdToCollectionInfoString(
+ CollectionPersister persister,
+ Serializable id,
+ SessionFactoryImplementor factory,
+ StringBuilder s ) {
+ // Need to use the identifier type of the collection owner
+ // since the incoming value is actually the owner's id.
+ // Using the collection's key type causes problems with
+ // property-ref keys.
+ // Also need to check that the expected identifier type matches
+ // the given ID. Due to property-ref keys, the collection key
+ // may not be the owner key.
+ Type ownerIdentifierType = persister.getOwnerEntityPersister()
+ .getIdentifierType();
+ if ( id.getClass().isAssignableFrom(
+ ownerIdentifierType.getReturnedClass() ) ) {
+ s.append( ownerIdentifierType.toLoggableString( id, factory ) );
+ } else {
+ // TODO: This is a crappy backup if a property-ref is used.
+ // If the reference is an object w/o toString(), this isn't going to work.
+ s.append( id.toString() );
+ }
+ }
/**
* Generate an info message string relating to a particular managed
diff --git a/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java b/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java
index 5d06d3c2ec..bf4f94c484 100755
--- a/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java
+++ b/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java
@@ -26,19 +26,19 @@ package org.hibernate.proxy;
import java.io.Serializable;
import javax.naming.NamingException;
-import org.jboss.logging.Logger;
+import javax.naming.NamingException;
import org.hibernate.HibernateException;
import org.hibernate.LazyInitializationException;
import org.hibernate.Session;
import org.hibernate.SessionException;
import org.hibernate.TransientObjectException;
-import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.SessionFactoryRegistry;
import org.hibernate.persister.entity.EntityPersister;
+import org.jboss.logging.Logger;
/**
* Convenience base class for lazy initialization handlers. Centralizes the basic plumbing of doing lazy
@@ -191,6 +191,22 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
SessionFactoryImplementor sf = (SessionFactoryImplementor)
SessionFactoryRegistry.INSTANCE.getSessionFactory( sessionFactoryUuid );
SessionImplementor session = (SessionImplementor) sf.openSession();
+
+ // TODO: On the next major release, add an
+ // 'isJTA' or 'getTransactionFactory' method to Session.
+ boolean isJTA = session.getTransactionCoordinator()
+ .getTransactionContext().getTransactionEnvironment()
+ .getTransactionFactory()
+ .compatibleWithJtaSynchronization();
+
+ if ( !isJTA ) {
+ // Explicitly handle the transactions only if we're not in
+ // a JTA environment. A lazy loading temporary session can
+ // be created even if a current session and transaction are
+ // open (ex: session.clear() was used). We must prevent
+ // multiple transactions.
+ ( ( Session) session ).beginTransaction();
+ }
try {
target = session.immediateLoad( entityName, id );
@@ -198,6 +214,9 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
finally {
// make sure the just opened temp session gets closed!
try {
+ if ( !isJTA ) {
+ ( ( Session) session ).getTransaction().commit();
+ }
( (Session) session ).close();
}
catch (Exception e) {
@@ -224,12 +243,7 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
protected void prepareForPossibleSpecialSpecjInitialization() {
if ( session != null ) {
- specjLazyLoad =
- Boolean.parseBoolean(
- session.getFactory()
- .getProperties()
- .getProperty( AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS )
- );
+ specjLazyLoad = session.getFactory().getSettings().isInitializeLazyStateOutsideTransactionsEnabled();
if ( specjLazyLoad && sessionFactoryUuid == null ) {
try {
diff --git a/hibernate-core/src/main/java/org/hibernate/sql/SelectValues.java b/hibernate-core/src/main/java/org/hibernate/sql/SelectValues.java
new file mode 100644
index 0000000000..10921037e4
--- /dev/null
+++ b/hibernate-core/src/main/java/org/hibernate/sql/SelectValues.java
@@ -0,0 +1,121 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.sql;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.jboss.logging.Logger;
+
+import org.hibernate.dialect.Dialect;
+
+/**
+ * Models a SELECT values list. Eventually, rather than Strings, pass in the Column/Formula representations (something
+ * like {@link org.hibernate.sql.ordering.antlr.ColumnReference}/{@link org.hibernate.sql.ordering.antlr.FormulaReference}).
+ *
+ * @author Steve Ebersole
+ */
+public class SelectValues {
+ private static final Logger log = Logger.getLogger( SelectValues.class );
+
+ private static class SelectValue {
+ private final String qualifier;
+ private final String value;
+ private final String alias;
+
+ private SelectValue(String qualifier, String value, String alias) {
+ this.qualifier = qualifier;
+ this.value = value;
+ this.alias = alias;
+ }
+ }
+
+ private final Dialect dialect;
+ private final ArrayList selectValueList = new ArrayList();
+
+ public SelectValues(Dialect dialect) {
+ this.dialect = dialect;
+ }
+
+ public SelectValues addColumns(String qualifier, String[] columnNames, String[] columnAliases) {
+ for ( int i = 0; i < columnNames.length; i++ ) {
+ if ( columnNames[i] != null ) {
+ addColumn( qualifier, columnNames[i], columnAliases[i] );
+ }
+ }
+ return this;
+ }
+
+ public SelectValues addColumn(String qualifier, String columnName, String columnAlias) {
+ selectValueList.add( new SelectValue( qualifier, columnName, columnAlias ) );
+ return this;
+ }
+
+ public SelectValues addParameter(int jdbcTypeCode, int length) {
+ final String selectExpression = dialect.requiresCastingOfParametersInSelectClause()
+ ? dialect.cast( "?", jdbcTypeCode, length )
+ : "?";
+ selectValueList.add( new SelectValue( null, selectExpression, null ) );
+ return this;
+ }
+
+ public SelectValues addParameter(int jdbcTypeCode, int precision, int scale) {
+ final String selectExpression = dialect.requiresCastingOfParametersInSelectClause()
+ ? dialect.cast( "?", jdbcTypeCode, precision, scale )
+ : "?";
+ selectValueList.add( new SelectValue( null, selectExpression, null ) );
+ return this;
+ }
+
+ public String render() {
+ final StringBuilder buf = new StringBuilder( selectValueList.size() * 10 );
+ final HashSet uniqueAliases = new HashSet();
+ boolean firstExpression = true;
+ for ( SelectValue selectValue : selectValueList ) {
+ if ( selectValue.alias != null ) {
+ if ( ! uniqueAliases.add( selectValue.alias ) ) {
+ log.debug( "Skipping select-value with non-unique alias" );
+ continue;
+ }
+ }
+
+ if ( firstExpression ) {
+ firstExpression = false;
+ }
+ else {
+ buf.append( ", " );
+ }
+
+ if ( selectValue.qualifier != null ) {
+ buf.append( selectValue.qualifier ).append( '.' );
+ }
+ buf.append( selectValue.value );
+ if ( selectValue.alias != null ) {
+ buf.append( " as " ).append( selectValue.alias );
+ }
+ }
+ return buf.toString();
+ }
+}
diff --git a/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java b/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java
index 69d114c477..2b74e76872 100644
--- a/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java
+++ b/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java
@@ -377,7 +377,7 @@ public class SchemaExport {
}
public void execute(Target output, Type type) {
- if ( output == Target.NONE || type == SchemaExport.Type.NONE ) {
+ if ( (outputFile == null && output == Target.NONE) || type == SchemaExport.Type.NONE ) {
return;
}
exceptions.clear();
diff --git a/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java b/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java
index 57a70c9d9e..779b623e5f 100644
--- a/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java
+++ b/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java
@@ -152,7 +152,7 @@ public class PojoComponentTuplizer extends AbstractComponentTuplizer {
if ( component == BackrefPropertyAccessor.UNKNOWN ) {
return new Object[propertySpan];
}
- if ( optimizer != null && optimizer.getAccessOptimizer() != null ) {
+ else if ( optimizer != null && optimizer.getAccessOptimizer() != null ) {
return optimizer.getAccessOptimizer().getPropertyValues( component );
}
else {
diff --git a/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java b/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java
index 2dba605cfa..b6653d087a 100644
--- a/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java
+++ b/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java
@@ -49,6 +49,7 @@ import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
* Convenience base class for {@link BasicType} implementations
*
* @author Steve Ebersole
+ * @author Brett Meyer
*/
public abstract class AbstractStandardBasicType
implements BasicType, StringRepresentableType, ProcedureParameterExtractionAware {
@@ -56,8 +57,10 @@ public abstract class AbstractStandardBasicType
private static final Size DEFAULT_SIZE = new Size( 19, 2, 255, Size.LobMultiplier.NONE ); // to match legacy behavior
private final Size dictatedSize = new Size();
- private final SqlTypeDescriptor sqlTypeDescriptor;
- private final JavaTypeDescriptor javaTypeDescriptor;
+ // Don't use final here. Need to initialize after-the-fact
+ // by DynamicParameterizedTypes.
+ private SqlTypeDescriptor sqlTypeDescriptor;
+ private JavaTypeDescriptor javaTypeDescriptor;
public AbstractStandardBasicType(SqlTypeDescriptor sqlTypeDescriptor, JavaTypeDescriptor javaTypeDescriptor) {
this.sqlTypeDescriptor = sqlTypeDescriptor;
@@ -113,17 +116,24 @@ public abstract class AbstractStandardBasicType
protected Size getDictatedSize() {
return dictatedSize;
}
-
-
+
// final implementations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
public final JavaTypeDescriptor getJavaTypeDescriptor() {
return javaTypeDescriptor;
}
+
+ public final void setJavaTypeDescriptor( JavaTypeDescriptor javaTypeDescriptor ) {
+ this.javaTypeDescriptor = javaTypeDescriptor;
+ }
public final SqlTypeDescriptor getSqlTypeDescriptor() {
return sqlTypeDescriptor;
}
+
+ public final void setSqlTypeDescriptor( SqlTypeDescriptor sqlTypeDescriptor ) {
+ this.sqlTypeDescriptor = sqlTypeDescriptor;
+ }
public final Class getReturnedClass() {
return javaTypeDescriptor.getJavaTypeClass();
@@ -234,24 +244,7 @@ public abstract class AbstractStandardBasicType
}
public final T nullSafeGet(ResultSet rs, String name, final SessionImplementor session) throws SQLException {
- // todo : have SessionImplementor extend WrapperOptions
- final WrapperOptions options = new WrapperOptions() {
- public boolean useStreamForLobBinding() {
- return Environment.useStreamsForBinary();
- }
-
- public LobCreator getLobCreator() {
- return Hibernate.getLobCreator( session );
- }
-
- public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) {
- final SqlTypeDescriptor remapped = sqlTypeDescriptor.canBeRemapped()
- ? session.getFactory().getDialect().remapSqlTypeDescriptor( sqlTypeDescriptor )
- : sqlTypeDescriptor;
- return remapped == null ? sqlTypeDescriptor : remapped;
- }
- };
-
+ final WrapperOptions options = getOptions(session);
return nullSafeGet( rs, name, options );
}
@@ -269,24 +262,7 @@ public abstract class AbstractStandardBasicType
Object value,
int index,
final SessionImplementor session) throws SQLException {
- // todo : have SessionImplementor extend WrapperOptions
- final WrapperOptions options = new WrapperOptions() {
- public boolean useStreamForLobBinding() {
- return Environment.useStreamsForBinary();
- }
-
- public LobCreator getLobCreator() {
- return Hibernate.getLobCreator( session );
- }
-
- public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) {
- final SqlTypeDescriptor remapped = sqlTypeDescriptor.canBeRemapped()
- ? session.getFactory().getDialect().remapSqlTypeDescriptor( sqlTypeDescriptor )
- : sqlTypeDescriptor;
- return remapped == null ? sqlTypeDescriptor : remapped;
- }
- };
-
+ final WrapperOptions options = getOptions(session);
nullSafeSet( st, value, index, options );
}
@@ -375,24 +351,7 @@ public abstract class AbstractStandardBasicType
@Override
public T extract(CallableStatement statement, int startIndex, final SessionImplementor session) throws SQLException {
- // todo : have SessionImplementor extend WrapperOptions
- final WrapperOptions options = new WrapperOptions() {
- public boolean useStreamForLobBinding() {
- return Environment.useStreamsForBinary();
- }
-
- public LobCreator getLobCreator() {
- return Hibernate.getLobCreator( session );
- }
-
- public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) {
- final SqlTypeDescriptor remapped = sqlTypeDescriptor.canBeRemapped()
- ? session.getFactory().getDialect().remapSqlTypeDescriptor( sqlTypeDescriptor )
- : sqlTypeDescriptor;
- return remapped == null ? sqlTypeDescriptor : remapped;
- }
- };
-
+ final WrapperOptions options = getOptions(session);
return remapSqlTypeDescriptor( options ).getExtractor( javaTypeDescriptor ).extract(
statement,
startIndex,
@@ -402,10 +361,16 @@ public abstract class AbstractStandardBasicType
@Override
public T extract(CallableStatement statement, String[] paramNames, final SessionImplementor session) throws SQLException {
- // todo : have SessionImplementor extend WrapperOptions
- final WrapperOptions options = new WrapperOptions() {
+ final WrapperOptions options = getOptions(session);
+ return remapSqlTypeDescriptor( options ).getExtractor( javaTypeDescriptor ).extract( statement, paramNames, options );
+ }
+
+ // TODO : have SessionImplementor extend WrapperOptions
+ private WrapperOptions getOptions(final SessionImplementor session) {
+ return new WrapperOptions() {
public boolean useStreamForLobBinding() {
- return Environment.useStreamsForBinary();
+ return Environment.useStreamsForBinary()
+ || session.getFactory().getDialect().useInputStreamToInsertBlob();
}
public LobCreator getLobCreator() {
@@ -419,7 +384,5 @@ public abstract class AbstractStandardBasicType
return remapped == null ? sqlTypeDescriptor : remapped;
}
};
-
- return remapSqlTypeDescriptor( options ).getExtractor( javaTypeDescriptor ).extract( statement, paramNames, options );
}
}
diff --git a/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java b/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java
index 4f6d8c9cc8..6cc3aff172 100644
--- a/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java
+++ b/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java
@@ -29,33 +29,38 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-
-import org.dom4j.Element;
-import org.dom4j.Node;
+import java.util.SortedMap;
+import java.util.TreeMap;
import org.hibernate.EntityMode;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.collection.spi.PersistentCollection;
+import org.hibernate.engine.spi.CollectionEntry;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.MarkerObject;
import org.hibernate.internal.util.collections.ArrayHelper;
+import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.spi.relational.Size;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Joinable;
+import org.hibernate.pretty.MessageHelper;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;
+import org.jboss.logging.Logger;
/**
* A type that handles Hibernate PersistentCollections (including arrays).
@@ -64,6 +69,8 @@ import org.hibernate.proxy.LazyInitializer;
*/
public abstract class CollectionType extends AbstractType implements AssociationType {
+ private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, CollectionType.class.getName());
+
private static final Object NOT_NULL_COLLECTION = new MarkerObject( "NOT NULL COLLECTION" );
public static final Object UNFETCHED_COLLECTION = new MarkerObject( "UNFETCHED COLLECTION" );
@@ -508,12 +515,92 @@ public abstract class CollectionType extends AbstractType implements Association
if ( ! ( ( PersistentCollection ) original ).isDirty() ) {
( ( PersistentCollection ) result ).clearDirty();
}
+
+ if ( elemType instanceof AssociationType ) {
+ preserveSnapshot( (PersistentCollection) original,
+ (PersistentCollection) result,
+ (AssociationType) elemType, owner, copyCache,
+ session );
+ }
}
}
return result;
}
+ private void preserveSnapshot(PersistentCollection original,
+ PersistentCollection result, AssociationType elemType,
+ Object owner, Map copyCache, SessionImplementor session) {
+ Serializable originalSnapshot = original.getStoredSnapshot();
+ Serializable resultSnapshot = result.getStoredSnapshot();
+ Serializable targetSnapshot;
+
+ if ( originalSnapshot instanceof List ) {
+ targetSnapshot = new ArrayList(
+ ( (List) originalSnapshot ).size() );
+ for ( Object obj : (List) originalSnapshot ) {
+ ( (List) targetSnapshot ).add( elemType.replace(
+ obj, null, session, owner, copyCache ) );
+ }
+
+ }
+ else if ( originalSnapshot instanceof Map ) {
+ if ( originalSnapshot instanceof SortedMap ) {
+ targetSnapshot = new TreeMap(
+ ( (SortedMap) originalSnapshot ).comparator() );
+ }
+ else {
+ targetSnapshot = new HashMap(
+ CollectionHelper.determineProperSizing(
+ ( (Map) originalSnapshot ).size() ),
+ CollectionHelper.LOAD_FACTOR );
+ }
+
+ for ( Map.Entry