diff --git a/.gitignore b/.gitignore index adced34f0d..164c5fcfab 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,10 @@ bin # Miscellaneous *.log .clover + +# JBoss Transactions +ObjectStore + +# Profiler and heap dumps +*.jps +*.hprof diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..87d82fdf71 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,45 @@ +Guidelines for Contributing +==== +Contributions from the community are essential in keeping Hibernate (any Open Source +project really) strong and successful. While we try to keep requirements for +contributing to a minimum, there are a few guidelines we ask that you mind. + +## Getting Started +If you are just getting started with Git, GitHub and/or contributing to Hibernate via +GitHub there are a few pre-requisite steps. + +* Make sure you have a [Hibernate Jira account](https://hibernate.onjira.com) +* Make sure you have a [GitHub account](https://github.com/signup/free) +* [Fork](http://help.github.com/fork-a-repo) the Hibernate repository. As discussed in +the linked page, this also includes: + * [Set](https://help.github.com/articles/set-up-git) up your local git install + * Clone your fork + + +## Create the working (topic) branch +Create a "topic" branch on which you will work. The convention is to name the branch +using the JIRA issue key. If there is not already a Jira issue covering the work you +want to do, create one. Assuming you will be working from the master branch and working +on Jira issue HHH-123 : `git checkout -b HHH-123 master` + + +## Code +Do yo thang! + +## Commit + +* Make commits of logical units. +* Be sure to use the JIRA issue key in the commit message. This is how Jira will pick +up the related commits and display them on the Jira issue. +* Make sure you have added the necessary tests for your changes. +* Run _all_ the tests to ensure nothing else was accidentally broken. + +_Prior to committing, if you want to pull in the latest upstream changes (highly +appreciated btw), please use rebasing rather than merging. Merging creates +"merge commits" that really muck up the project timeline._ + +## Submit +* Sign the [Contributor License Agreement](https://cla.jboss.org/index.seam). +* Push your changes to a topic branch in your fork of the repository. +* Initiate a [pull request](http://help.github.com/send-pull-requests/) +* Update the Jira issue, adding a comment including a link to the created pull request diff --git a/README.md b/README.md index eb80686278..e7d3e9d98a 100644 --- a/README.md +++ b/README.md @@ -38,8 +38,12 @@ Executing Tasks Gradle uses the concept of build tasks (equivalent to Ant targets). You can get a list of available tasks via - gradle --tasks - + gradle tasks + +or if using the Gradle wrapper + + ./gradlew tasks + ### Executing Tasks Across All Modules To execute a task across all modules, simply perform that task from the root directory.
Gradle will visit each diff --git a/build.gradle b/build.gradle index 610a33bb85..bfc15ca270 100644 --- a/build.gradle +++ b/build.gradle @@ -179,6 +179,10 @@ subprojects { subProject -> systemProperty entry.key, entry.value } } + } + test { + systemProperties['hibernate.test.validatefailureexpected'] = true + systemProperties += System.properties.findAll { it.key.startsWith( "hibernate.") } maxHeapSize = "1024m" } diff --git a/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy b/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy index 5cc4cc3988..0b86ce7b0d 100644 --- a/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy +++ b/buildSrc/src/main/groovy/org/hibernate/build/qalab/DatabaseAllocator.groovy @@ -54,7 +54,7 @@ class DatabaseAllocator { "postgresql82", "postgresql83", "postgresql84", "postgresql91", "mysql50", "mysql51","mysql55", "db2-91", "db2-97", - "mssql2005", "mssql2008R1", "mssql2008R2", + "mssql2005", "mssql2008R1", "mssql2008R2", "mssql2012", "sybase155", "sybase157" ]; @@ -97,4 +97,4 @@ class DatabaseAllocator { } return (DatabaseAllocator) project.rootProject.properties[ DB_ALLOCATOR_KEY ]; } -} \ No newline at end of file +} diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/Fetching.xml b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/Fetching.xml new file mode 100644 index 0000000000..366979eb90 --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/Fetching.xml @@ -0,0 +1,228 @@ + + + + + Fetching + + + Fetching, essentially, is the process of grabbing data from the database and making it available to the + application. Tuning how an application does fetching is one of the biggest factors in determining how an + application will perform. Fetching too much data, in terms of width (values/columns) and/or + depth (results/rows), adds unnecessary overhead in terms of both JDBC communication and ResultSet processing. + Fetching too little data causes additional fetches to be needed. Tuning how an application + fetches data presents a great opportunity to influence the application's overall performance. + + +
+ The basics + + The concept of fetching breaks down into two different questions. + + + When should the data be fetched? Now? Later? + + + + How should the data be fetched? + + + + + + + "now" is generally termed eager or immediate. "later" is + generally termed lazy or delayed. + + + + + There are a number of scopes for defining fetching: + + + + static - Static definition of fetching strategies is done in the + mappings. The statically-defined fetch strategies are used in the absence of any dynamically + defined strategies, except in the case of HQL/JPQL; see xyz. + + + + + dynamic (sometimes referred to as runtime) - Dynamic definition is + really use-case centric. There are two main ways to define dynamic fetching: + + + + + fetch profiles - defined in mappings, but can be + enabled/disabled on the Session. + + + + + HQL/JPQL and both Hibernate and JPA Criteria queries have the ability to specify + fetching, specific to that query. + + + + + + + + + The strategies + + SELECT + + + Performs a separate SQL select to load the data. This can either be EAGER (the second select + is issued immediately) or LAZY (the second select is delayed until the data is needed). This + is the strategy generally termed N+1. + + + + + JOIN + + + Inherently an EAGER style of fetching. The data to be fetched is obtained through the use of + an SQL join. + + + + + BATCH + + + Performs a separate SQL select to load a number of related data items using an + IN-restriction as part of the SQL WHERE-clause based on a batch size. Again, this can either + be EAGER (the second select is issued immediately) or LAZY (the second select is delayed until + the data is needed). + + + + + SUBSELECT + + + Performs a separate SQL select to load associated data based on the SQL restriction used to + load the owner. Again, this can either be EAGER (the second select is issued immediately) + or LAZY (the second select is delayed until the data is needed). + + + +
+ +
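To make the static scope and the strategy names above concrete, here is a brief editor-added sketch. It is not one of the chapter's extras, it borrows the Employee/Department/Project model introduced later in this chapter, and it is deliberately not the chapter's recommended all-lazy mapping; it simply shows how SELECT, JOIN and BATCH are typically declared statically with org.hibernate.annotations.Fetch and @BatchSize:

    import java.util.Set;
    import javax.persistence.Entity;
    import javax.persistence.FetchType;
    import javax.persistence.Id;
    import javax.persistence.ManyToMany;
    import javax.persistence.ManyToOne;

    import org.hibernate.annotations.BatchSize;
    import org.hibernate.annotations.Fetch;
    import org.hibernate.annotations.FetchMode;

    @Entity
    public class Employee {
        @Id
        private Long id;

        // JOIN is inherently EAGER: the Department row is loaded via an SQL join with the Employee row
        @ManyToOne( fetch = FetchType.EAGER )
        @Fetch( FetchMode.JOIN )
        private Department department;

        // SELECT + LAZY: a second select runs only when the collection is first accessed;
        // @BatchSize turns that into a BATCH select, loading up to 16 pending collections per IN-restricted select
        @ManyToMany( mappedBy = "employees", fetch = FetchType.LAZY )
        @Fetch( FetchMode.SELECT )
        @BatchSize( size = 16 )
        private Set<Project> projects;

        // ...
    }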
+ Applying fetch strategies + + + Let's consider these topics as they relate to a simple domain model and a few use cases. + + + + Sample domain model + + + + + + + + The Hibernate recommendation is to statically mark all associations lazy and to use dynamic fetching + strategies for eagerness. This is unfortunately at odds with the JPA specification, which defines that + all one-to-one and many-to-one associations should be eagerly fetched by default. Hibernate, as a JPA + provider, honors that default. + + + +
+ No fetching + The login use-case + + For the first use case, consider the application's login process for an Employee. Let's assume that + login only requires access to the Employee information, not Project or Department information. + + + + No fetching example + + + + + In this example, the application gets the Employee data. However, because all associations from + Employee are declared as LAZY (JPA defines the default for collections as LAZY), no other data is + fetched. + + + + If the login process does not actually need the Employee entity itself, another + fetching optimization here would be to limit the width of the query results. + + + + No fetching (scalar) example + + +
+ +
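The chapter's Login.java and LoginScalar.java extracts elide the parameter binding behind "...", so as an editor-added sketch (assuming the same Employee mapping, an open Session, and userid/password variables already in scope), the scalar variant might be run like this:

    String loginHql = "select e.accessLevel from Employee e"
            + " where e.userid = :userid and e.password = :password";

    Integer accessLevel = (Integer) session.createQuery( loginHql )
            .setParameter( "userid", userid )
            .setParameter( "password", password )
            .uniqueResult();

A null result means no matching Employee; nothing beyond the single accessLevel column is pulled back from the database.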
+ Dynamic fetching via queries + The projects for an employee use-case + + + For the second use case, consider a screen displaying the Projects for an Employee. Certainly access + to the Employee is needed, as is the collection of Projects for that Employee. Information + about Departments, other Employees or other Projects is not needed. + + + + Dynamic query fetching example + + + + + + In this example we have an Employee and their Projects loaded in a single query shown both as an HQL + query and a JPA Criteria query. In both cases, this resolves to exactly one database query to get + all that information. + +
+ +
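The same dynamic fetching can also be expressed as JPQL through a JPA EntityManager. This is an editor-added sketch, not one of the chapter extras; it assumes the same Employee model and a userid variable in scope:

    String jpql = "select e from Employee e join fetch e.projects where e.userid = :userid";

    Employee e = entityManager.createQuery( jpql, Employee.class )
            .setParameter( "userid", userid )
            .getSingleResult();

Like the HQL and Criteria forms, this resolves to a single SQL query joining the Employee to its Projects.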
+ Dynamic fetching via profiles + The projects for an employee use-case using natural-id + + + Suppose we wanted to leverage loading by natural-id to obtain the Employee information in the + "projects for an employee" use-case. Loading by natural-id uses the statically defined fetching + strategies, but does not expose a means to define load-specific fetching. So we would leverage a + fetch profile. + + + + Fetch profile example + + + + + + Here the Employee is obtained by natural-id lookup and the Employee's Project data is fetched eagerly. + If the Employee data is resolved from cache, the Project data is resolved on its own. However, + if the Employee data is not resolved in cache, the Employee and Project data is resolved in one + SQL query via join as we saw above. +
+
+ + + + +
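One practical note on the profile-based approach: a fetch profile stays enabled on the Session until it is explicitly disabled, so it is worth scoping it to the lookup that needs it. The sketch below is an editor addition, not part of the chapter sources; it assumes the "employee.projects" profile from FetchOverrides.java and a userid variable in scope:

    session.enableFetchProfile( "employee.projects" );
    try {
        Employee e = (Employee) session.bySimpleNaturalId( Employee.class )
                .load( userid );
        // ... work with e.getProjects() while the profile is active ...
    }
    finally {
        session.disableFetchProfile( "employee.projects" );
    }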
\ No newline at end of file diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Department.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Department.java new file mode 100644 index 0000000000..06eab06a51 --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Department.java @@ -0,0 +1,10 @@ +@Entity +public class Department { + @Id + private Long id; + + @OneToMany(mappedBy="department") + private List<Employee> employees; + + ... +} \ No newline at end of file diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Employee.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Employee.java new file mode 100644 index 0000000000..62ed3e54e0 --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Employee.java @@ -0,0 +1,24 @@ +@Entity +public class Employee { + @Id + private Long id; + + @NaturalId + private String userid; + + @Column( name="pswd" ) + @ColumnTransformer( read="decrypt(pswd)", write="encrypt(?)" ) + private String password; + + private int accessLevel; + + @ManyToOne( fetch=LAZY ) + @JoinColumn + private Department department; + + @ManyToMany(mappedBy="employees") + @JoinColumn + private Set<Project> projects; + + ... +} \ No newline at end of file diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/FetchOverrides.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/FetchOverrides.java new file mode 100644 index 0000000000..4144ea27dc --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/FetchOverrides.java @@ -0,0 +1,10 @@ +@FetchProfile( + name="employee.projects", + fetchOverrides={ + @FetchOverride( + entity=Employee.class, + association="projects", + mode=JOIN + ) + } +) \ No newline at end of file diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Login.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Login.java new file mode 100644 index 0000000000..f916bb847a --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Login.java @@ -0,0 +1,4 @@ +String loginHql = "select e from Employee e where e.userid = :userid and e.password = :password"; +Employee employee = (Employee) session.createQuery( loginHql ) + ... + .uniqueResult(); diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/LoginScalar.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/LoginScalar.java new file mode 100644 index 0000000000..8905b0ce4a --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/LoginScalar.java @@ -0,0 +1,4 @@ +String loginHql = "select e.accessLevel from Employee e where e.userid = :userid and e.password = :password"; +Integer accessLevel = (Integer) session.createQuery( loginHql ) + ... + .uniqueResult(); diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Project.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Project.java new file mode 100644 index 0000000000..94fe42c0d5 --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/Project.java @@ -0,0 +1,10 @@ +@Entity +public class Project { + @Id + private Long id; + + @ManyToMany + private Set<Employee> employees; + + ...
+} \ No newline at end of file diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeCriteria.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeCriteria.java new file mode 100644 index 0000000000..384d964e07 --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeCriteria.java @@ -0,0 +1,10 @@ +String userid = ...; +CriteriaBuilder cb = entityManager.getCriteriaBuilder(); +CriteriaQuery<Employee> criteria = cb.createQuery( Employee.class ); +Root<Employee> root = criteria.from( Employee.class ); +root.fetch( Employee_.projects ); +criteria.select( root ); +criteria.where( + cb.equal( root.get( Employee_.userid ), cb.literal( userid ) ) +); +Employee e = entityManager.createQuery( criteria ).getSingleResult(); diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeFetchProfile.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeFetchProfile.java new file mode 100644 index 0000000000..297cb8cfc6 --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeFetchProfile.java @@ -0,0 +1,4 @@ +String userid = ...; +session.enableFetchProfile( "employee.projects" ); +Employee e = (Employee) session.bySimpleNaturalId( Employee.class ) + .load( userid ); \ No newline at end of file diff --git a/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeHql.java b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeHql.java new file mode 100644 index 0000000000..11235281d0 --- /dev/null +++ b/documentation/src/main/docbook/devguide/en-US/chapters/fetching/extras/ProjectsForAnEmployeeHql.java @@ -0,0 +1,5 @@ +String userid = ...; +String hql = "select e from Employee e join fetch e.projects where e.userid = :userid"; +Employee e = (Employee) session.createQuery( hql ) + .setParameter( "userid", userid ) + .uniqueResult(); diff --git a/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java b/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java index 43d9b5acdb..f8917f4653 100644 --- a/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java +++ b/hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java @@ -72,7 +72,7 @@ public class BulkOperationCleanupAction implements Executable, Serializable { * @param session The session to which this request is tied. * @param affectedQueryables The affected entity persisters. */ - public BulkOperationCleanupAction(SessionImplementor session, Queryable[] affectedQueryables) { + public BulkOperationCleanupAction(SessionImplementor session, Queryable...
affectedQueryables) { SessionFactoryImplementor factory = session.getFactory(); LinkedHashSet spacesList = new LinkedHashSet(); for ( Queryable persister : affectedQueryables ) { diff --git a/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java b/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java index 52c4795d92..31c565ce15 100644 --- a/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java +++ b/hibernate-core/src/main/java/org/hibernate/action/internal/CollectionUpdateAction.java @@ -73,7 +73,8 @@ public final class CollectionUpdateAction extends CollectionAction { if (affectedByFilters) { throw new HibernateException( "cannot recreate collection while filter is enabled: " + - MessageHelper.collectionInfoString( persister, id, persister.getFactory() ) + MessageHelper.collectionInfoString(persister, collection, + id, session ) ); } if ( !emptySnapshot ) persister.remove( id, session ); diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java b/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java index ddb2703690..af2e72b8ca 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/AccessType.java @@ -36,9 +36,13 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME; * Prefer the standard {@link javax.persistence.Access} annotation * * @author Emmanuel Bernard + * + * @deprecated Use {@link AttributeAccessor} instead; renamed to avoid confusion with the JPA + * {@link javax.persistence.AccessType} enum. */ @Target({ TYPE, METHOD, FIELD }) @Retention(RUNTIME) +@Deprecated public @interface AccessType { String value(); } diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Any.java b/hibernate-core/src/main/java/org/hibernate/annotations/Any.java index 8ab0e1a308..7ec5a179f5 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/Any.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/Any.java @@ -31,11 +31,32 @@ import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Define a ToOne association pointing to several entity types. - * Matching the according entity type is doe through a metadata discriminator column - * This kind of mapping should be only marginal. - * + * Defines a ToOne-style association pointing to one of several entity types depending on a local discriminator, + * as opposed to discriminated inheritance where the discriminator is kept as part of the entity hierarchy. + * + * For example, if you consider an Order entity containing Payment information where Payment might be of type + * CashPayment or CreditCardPayment the @Any approach would be to keep that discriminator and matching value on the + * Order itself. Thought of another way, the "foreign-key" really is made up of the value and discriminator + * (there is no physical foreign key here as databases do not support this): + *
+ *    @Entity
+ *    class Order {
+ *        ...
+ *        @Any( metaColumn = @Column( name="payment_type" ) )
+ *        @AnyMetaDef(
+ *                idType = "long",
+ *                metaValues = {
+ *                        @MetaValue( value="C", targetEntity=CashPayment.class ),
+ *                        @MetaValue( value="CC", targetEntity=CreditCardPayment.class ),
+ *                }
+ *        )
+ *        public Payment getPayment() { ... }
+ *    }
+ * }
+ * 
+ * * @author Emmanuel Bernard + * @author Steve Ebersole */ @java.lang.annotation.Target({METHOD, FIELD}) @Retention(RUNTIME) @@ -48,10 +69,10 @@ public @interface Any { String metaDef() default ""; /** - * Metadata discriminator column description, This column will hold the meta value corresponding to the - * targeted entity. + * Identifies the discriminator column. This column will hold the value that identifies the targeted entity. */ Column metaColumn(); + /** * Defines whether the value of the field or property should be lazily loaded or must be * eagerly fetched. The EAGER strategy is a requirement on the persistence provider runtime diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java index cc50fa9425..e7cac8a031 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDef.java @@ -31,9 +31,12 @@ import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Defines @Any and @manyToAny metadata + * Used to provide metadata about an {@link Any} or {@link ManyToAny} mapping. + * + * @see AnyMetaDefs * * @author Emmanuel Bernard + * @author Steve Ebersole */ @java.lang.annotation.Target( { PACKAGE, TYPE, METHOD, FIELD } ) @Retention( RUNTIME ) @@ -45,18 +48,18 @@ public @interface AnyMetaDef { String name() default ""; /** - * meta discriminator Hibernate type + * Names the discriminator Hibernate Type for this Any/ManyToAny mapping. The default is to use + * {@link org.hibernate.type.StringType} */ String metaType(); /** - * Hibernate type of the id column - * @return Hibernate type of the id column + * Names the identifier Hibernate Type for the entity associated through this Any/ManyToAny mapping. */ String idType(); /** - * Matching discriminator values with their respective entity + * Maps discriminator values to the matching corresponding entity types. */ MetaValue[] metaValues(); } diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java index 268418bd0e..0de690efa7 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/AnyMetaDefs.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.annotations; + import java.lang.annotation.Retention; import static java.lang.annotation.ElementType.PACKAGE; @@ -29,10 +30,10 @@ import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Defines @Any and @ManyToAny set of metadata. - * Can be defined at the entity level or the package level + * Used to group together {@link AnyMetaDef} annotations. Can be defined at the entity or package level * * @author Emmanuel Bernard + * @author Steve Ebersole */ @java.lang.annotation.Target( { PACKAGE, TYPE } ) @Retention( RUNTIME ) diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/AttributeAccessor.java b/hibernate-core/src/main/java/org/hibernate/annotations/AttributeAccessor.java new file mode 100644 index 0000000000..32991f4865 --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/annotations/AttributeAccessor.java @@ -0,0 +1,61 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. 
or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.annotations; + +import java.lang.annotation.Retention; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.TYPE; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +/** + * Names a {@link org.hibernate.property.PropertyAccessor} strategy to use. + * + * Can be specified at either:
<ul>
+ *     <li>
+ *         TYPE level, which will act as naming the default accessor strategy for
+ *         all attributes on the class which do not explicitly name an accessor strategy
+ *     </li>
+ *     <li>
+ *         METHOD/FIELD level, which will be in effect for just that attribute.
+ *     </li>
+ * </ul>
+ * + * Should only be used to name custom {@link org.hibernate.property.PropertyAccessor}. For {@code property/field} + * access, the JPA {@link javax.persistence.Access} annotation should be preferred using the appropriate + * {@link javax.persistence.AccessType}. However, if this annotation is used with either {@code value="property"} + * or {@code value="field"}, it will act just as the corresponding usage of {@link javax.persistence.Access}. + * + * @author Steve Ebersole + * @author Emmanuel Bernard + */ +@java.lang.annotation.Target({ TYPE, METHOD, FIELD }) +@Retention(RUNTIME) +public @interface AttributeAccessor { + /** + * Names the {@link org.hibernate.property.PropertyAccessor} strategy + */ + String value(); +} diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java b/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java index 0219574aaf..414af5199c 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.annotations; + import java.lang.annotation.Retention; import java.lang.annotation.Target; @@ -31,13 +32,31 @@ import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Batch size for SQL loading + * Defines size for batch loading of collections or lazy entities. For example... + *
+ *     @Entity
+ *     @BatchSize(size=100)
+ *     class Product {
+ *         ...
+ *     }
+ * 
+ * will initialize up to 100 lazy Product entity proxies at a time. + * + *
+ *     	@OneToMany
+ *     	@BatchSize(size = 5)
+ *     	Set<Product> getProducts() { ... };
+ * 
+ * will initialize up to 5 lazy collections of products at a time * * @author Emmanuel Bernard + * @author Steve Ebersole */ @Target({TYPE, METHOD, FIELD}) @Retention(RUNTIME) public @interface BatchSize { - /** Strictly positive integer */ + /** + * Strictly positive integer + */ int size(); } diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java b/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java index 2e3762292e..406adb7948 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/Cascade.java @@ -30,7 +30,12 @@ import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Apply a cascade strategy on an association + * Apply a cascade strategy on an association. Used to apply Hibernate specific cascades. For JPA cascading, prefer + * using {@link javax.persistence.CascadeType} on {@link javax.persistence.OneToOne}, + * {@link javax.persistence.OneToMany}, etc. Hibernate will merge together both sets of cascades. + * + * @author Emmanuel Bernard + * @author Steve Ebersole */ @Target({METHOD, FIELD}) @Retention(RUNTIME) diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java b/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java index 0cf7b204fd..2767b824c2 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/CascadeType.java @@ -25,7 +25,7 @@ package org.hibernate.annotations; /** - * Cascade types (can override default EJB3 cascades + * Cascade types (can override default JPA cascades */ public enum CascadeType { ALL, diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Check.java b/hibernate-core/src/main/java/org/hibernate/annotations/Check.java index 068722cada..8230af6e35 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/Check.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/Check.java @@ -32,8 +32,7 @@ import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Arbitrary SQL check constraints which can be defined at the class, - * property or collection level + * Arbitrary SQL CHECK constraints which can be defined at the class, property or collection level * * @author Emmanuel Bernard */ diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java b/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java index 199ab14ff9..5bbe42d538 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/CollectionType.java @@ -30,7 +30,8 @@ import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Names a custom collection type for a persistent collection. + * Names a custom collection type for a persistent collection. The collection can also name a @Type, which defines + * the Hibernate Type of the collection elements. 
* * @see org.hibernate.type.CollectionType * @see org.hibernate.usertype.UserCollectionType diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java b/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java index 810d2b5dbf..0d9c1c4486 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/ColumnTransformer.java @@ -34,7 +34,9 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME; * The write expression must contain exactly one '?' placeholder for the value. * * For example: read="decrypt(credit_card_num)" write="encrypt(?)" - * + * + * @see ColumnTransformers + * * @author Emmanuel Bernard */ @java.lang.annotation.Target({FIELD,METHOD}) diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java b/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java index 6761631fa9..c272298480 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/DiscriminatorFormula.java @@ -29,11 +29,15 @@ import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Discriminator formula - * To be placed at the root entity. + * Used to apply a Hibernate formula (derived value) as the inheritance discriminator "column". Used in place of + * the JPA {@link javax.persistence.DiscriminatorColumn} when a formula is wanted. + * + * To be placed on the root entity. + * + * @see Formula * * @author Emmanuel Bernard - * @see Formula + * @author Steve Ebersole */ @Target({TYPE}) @Retention(RUNTIME) diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java b/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java index 73a496276e..813d8fd94b 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/Formula.java @@ -30,10 +30,34 @@ import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Formula. To be used as a replacement for @Column in most places - * The formula has to be a valid SQL fragment + * Defines a formula (derived value) which is a SQL fragment that acts as a @Column alternative in most cases. + * Represents read-only state. + * + * In certain cases @ColumnTransformer might be a better option, especially as it leaves open the option of still + * being writable. + * + *
+ *     // perform calculations
+ *     @Formula( "sub_total + (sub_total * tax)" )
+ *     long getTotalCost() { ... }
+ * 
+ * + *
+ *     // call functions
+ *     @Formula( "upper( substring( middle_name, 1, 1 ) )" )
+ *     Character getMiddleInitial() { ... }
+ * 
+ * + *
+ *     // this might be better handled through @ColumnTransformer
+ *     @Formula( "decrypt(credit_card_num)" )
+ *     String getCreditCardNumber() { ... }
+ * 
+ * + * @see ColumnTransformer * * @author Emmanuel Bernard + * @author Steve Ebersole */ @Target({METHOD, FIELD}) @Retention(RUNTIME) diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java b/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java index bb20cc1e00..6fae415eb1 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/ManyToAny.java @@ -22,20 +22,23 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.annotations; -import java.lang.annotation.Retention; + import javax.persistence.Column; import javax.persistence.FetchType; +import java.lang.annotation.Retention; import static java.lang.annotation.ElementType.FIELD; import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** - * Defined a ToMany association pointing to different entity types. - * Matching the according entity type is doe through a metadata discriminator column - * This kind of mapping should be only marginal. + * This is the collection-valued form of @Any definitions. Defines a ToMany-style association pointing + * to one of several entity types depending on a local discriminator. See {@link Any} for further information. + * + * @see Any * * @author Emmanuel Bernard + * @author Steve Ebersole */ @java.lang.annotation.Target({METHOD, FIELD}) @Retention(RUNTIME) diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java b/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java index d6fa60c89e..e9428b019d 100644 --- a/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java +++ b/hibernate-core/src/main/java/org/hibernate/annotations/MetaValue.java @@ -23,10 +23,13 @@ */ package org.hibernate.annotations; - /** - * Represent a discriminator value associated to a given entity type + * Maps a given discriminator value to the corresponding entity type. See {@link Any} for more information. + * + * @see Any + * * @author Emmanuel Bernard + * @author Steve Ebersole */ public @interface MetaValue { /** diff --git a/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java b/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java index 6db8a97db5..7d9eeeb36e 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java @@ -92,6 +92,10 @@ public class StandardServiceRegistryBuilder { return initiators; } + public BootstrapServiceRegistry getBootstrapServiceRegistry() { + return bootstrapServiceRegistry; + } + /** * Read settings from a {@link Properties} file. Differs from {@link #configure()} and {@link #configure(String)} * in that here we read a {@link Properties} file while for {@link #configure} we read the XML variant. @@ -224,6 +228,15 @@ public class StandardServiceRegistryBuilder { } } + /** + * Temporarily exposed since Configuration is still around and much code still uses Configuration. This allows + * code to configure the builder and access that to configure Configuration object (used from HEM atm). + */ + @Deprecated + public Map getSettings() { + return settings; + } + /** * Destroy a service registry. Applications should only destroy registries they have explicitly created. 
* diff --git a/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java b/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java index 6404478a06..d24b50f3f1 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java @@ -94,6 +94,9 @@ import org.hibernate.engine.transaction.jta.platform.internal.WebSphereJtaPlatfo import org.hibernate.engine.transaction.jta.platform.internal.WeblogicJtaPlatform; import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; import org.hibernate.engine.transaction.spi.TransactionFactory; +import org.hibernate.hql.spi.MultiTableBulkIdStrategy; +import org.hibernate.hql.spi.PersistentTableBulkIdStrategy; +import org.hibernate.hql.spi.TemporaryTableBulkIdStrategy; /** * @author Steve Ebersole @@ -131,6 +134,7 @@ public class StrategySelectorBuilder { addDialects( strategySelector ); addJtaPlatforms( strategySelector ); addTransactionFactories( strategySelector ); + addMultiTableBulkIdStrategies( strategySelector ); // apply auto-discovered registrations for ( AvailabilityAnnouncer announcer : classLoaderService.loadJavaServices( AvailabilityAnnouncer.class ) ) { @@ -327,4 +331,17 @@ public class StrategySelectorBuilder { strategySelector.registerStrategyImplementor( TransactionFactory.class, CMTTransactionFactory.SHORT_NAME, CMTTransactionFactory.class ); strategySelector.registerStrategyImplementor( TransactionFactory.class, "org.hibernate.transaction.CMTTransactionFactory", CMTTransactionFactory.class ); } + + private void addMultiTableBulkIdStrategies(StrategySelectorImpl strategySelector) { + strategySelector.registerStrategyImplementor( + MultiTableBulkIdStrategy.class, + PersistentTableBulkIdStrategy.SHORT_NAME, + PersistentTableBulkIdStrategy.class + ); + strategySelector.registerStrategyImplementor( + MultiTableBulkIdStrategy.class, + TemporaryTableBulkIdStrategy.SHORT_NAME, + TemporaryTableBulkIdStrategy.class + ); + } } diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java b/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java index e3c4bd8513..1182ed9d18 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/buildtime/internal/JavassistInstrumenter.java @@ -25,13 +25,9 @@ package org.hibernate.bytecode.buildtime.internal; import java.io.ByteArrayInputStream; import java.io.DataInputStream; -import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.util.Set; -import javassist.ClassClassPath; -import javassist.ClassPool; import javassist.bytecode.ClassFile; import org.hibernate.bytecode.buildtime.spi.AbstractInstrumenter; @@ -48,7 +44,6 @@ import org.hibernate.bytecode.spi.ClassTransformer; * * @author Steve Ebersole * @author Muga Nishizawa - * @author Dustin Schultz */ public class JavassistInstrumenter extends AbstractInstrumenter { @@ -75,20 +70,6 @@ public class JavassistInstrumenter extends AbstractInstrumenter { return provider.getTransformer( CLASS_FILTER, new CustomFieldFilter( descriptor, classNames ) ); } } - - @Override - public void execute(Set files) { - ClassPool cp = ClassPool.getDefault(); - cp.insertClassPath(new 
ClassClassPath(this.getClass())); - try { - for (File file : files) { - cp.makeClass(new FileInputStream(file)); - } - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - super.execute(files); - } private static class CustomClassDescriptor implements ClassDescriptor { private final byte[] bytes; diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java b/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java index 300238fee4..2e547c3627 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/internal/javassist/FieldTransformer.java @@ -32,7 +32,6 @@ import java.util.Iterator; import java.util.List; import javassist.CannotCompileException; -import javassist.ClassPool; import javassist.bytecode.AccessFlag; import javassist.bytecode.BadBytecode; import javassist.bytecode.Bytecode; @@ -44,8 +43,6 @@ import javassist.bytecode.Descriptor; import javassist.bytecode.FieldInfo; import javassist.bytecode.MethodInfo; import javassist.bytecode.Opcode; -import javassist.bytecode.StackMapTable; -import javassist.bytecode.stackmap.MapMaker; /** * The thing that handles actual class enhancement in regards to @@ -53,7 +50,6 @@ import javassist.bytecode.stackmap.MapMaker; * * @author Muga Nishizawa * @author Steve Ebersole - * @author Dustin Schultz */ public class FieldTransformer { @@ -134,7 +130,7 @@ public class FieldTransformer { } private void addGetFieldHandlerMethod(ClassFile classfile) - throws CannotCompileException, BadBytecode { + throws CannotCompileException { ConstPool cp = classfile.getConstPool(); int this_class_index = cp.getThisClassInfo(); MethodInfo minfo = new MethodInfo(cp, GETFIELDHANDLER_METHOD_NAME, @@ -152,13 +148,11 @@ public class FieldTransformer { code.addOpcode(Opcode.ARETURN); minfo.setCodeAttribute(code.toCodeAttribute()); minfo.setAccessFlags(AccessFlag.PUBLIC); - StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo); - minfo.getCodeAttribute().setAttribute(smt); classfile.addMethod(minfo); } private void addSetFieldHandlerMethod(ClassFile classfile) - throws CannotCompileException, BadBytecode { + throws CannotCompileException { ConstPool cp = classfile.getConstPool(); int this_class_index = cp.getThisClassInfo(); MethodInfo minfo = new MethodInfo(cp, SETFIELDHANDLER_METHOD_NAME, @@ -178,8 +172,6 @@ public class FieldTransformer { code.addOpcode(Opcode.RETURN); minfo.setCodeAttribute(code.toCodeAttribute()); minfo.setAccessFlags(AccessFlag.PUBLIC); - StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo); - minfo.getCodeAttribute().setAttribute(smt); classfile.addMethod(minfo); } @@ -193,7 +185,7 @@ public class FieldTransformer { } private void addReadWriteMethods(ClassFile classfile) - throws CannotCompileException, BadBytecode { + throws CannotCompileException { List fields = classfile.getFields(); for (Iterator field_iter = fields.iterator(); field_iter.hasNext();) { FieldInfo finfo = (FieldInfo) field_iter.next(); @@ -213,7 +205,7 @@ public class FieldTransformer { } private void addReadMethod(ClassFile classfile, FieldInfo finfo) - throws CannotCompileException, BadBytecode { + throws CannotCompileException { ConstPool cp = classfile.getConstPool(); int this_class_index = cp.getThisClassInfo(); String desc = "()" + finfo.getDescriptor(); @@ -262,13 +254,11 @@ public class FieldTransformer { 
minfo.setCodeAttribute(code.toCodeAttribute()); minfo.setAccessFlags(AccessFlag.PUBLIC); - StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo); - minfo.getCodeAttribute().setAttribute(smt); classfile.addMethod(minfo); } private void addWriteMethod(ClassFile classfile, FieldInfo finfo) - throws CannotCompileException, BadBytecode { + throws CannotCompileException { ConstPool cp = classfile.getConstPool(); int this_class_index = cp.getThisClassInfo(); String desc = "(" + finfo.getDescriptor() + ")V"; @@ -330,13 +320,11 @@ public class FieldTransformer { minfo.setCodeAttribute(code.toCodeAttribute()); minfo.setAccessFlags(AccessFlag.PUBLIC); - StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo); - minfo.getCodeAttribute().setAttribute(smt); classfile.addMethod(minfo); } private void transformInvokevirtualsIntoPutAndGetfields(ClassFile classfile) - throws CannotCompileException, BadBytecode { + throws CannotCompileException { List methods = classfile.getMethods(); for (Iterator method_iter = methods.iterator(); method_iter.hasNext();) { MethodInfo minfo = (MethodInfo) method_iter.next(); @@ -353,13 +341,15 @@ public class FieldTransformer { } CodeIterator iter = codeAttr.iterator(); while (iter.hasNext()) { - int pos = iter.next(); - pos = transformInvokevirtualsIntoGetfields(classfile, iter, pos); - pos = transformInvokevirtualsIntoPutfields(classfile, iter, pos); + try { + int pos = iter.next(); + pos = transformInvokevirtualsIntoGetfields(classfile, iter, pos); + pos = transformInvokevirtualsIntoPutfields(classfile, iter, pos); + } catch ( BadBytecode e ){ + throw new CannotCompileException( e ); + } + } - - StackMapTable smt = MapMaker.make(ClassPool.getDefault(), minfo); - minfo.getCodeAttribute().setAttribute(smt); } } diff --git a/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java b/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java index fdcea19270..b5a81b36e3 100644 --- a/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java +++ b/hibernate-core/src/main/java/org/hibernate/cache/internal/StandardQueryCache.java @@ -64,6 +64,8 @@ public class StandardQueryCache implements QueryCache { StandardQueryCache.class.getName() ); + private static final boolean tracing = LOG.isTraceEnabled(); + private QueryResultsRegion cacheRegion; private UpdateTimestampsCache updateTimestampsCache; @@ -246,7 +248,7 @@ public class StandardQueryCache implements QueryCache { } private static void logCachedResultRowDetails(Type[] returnTypes, Object[] tuple) { - if ( !LOG.isTraceEnabled() ) { + if ( !tracing ) { return; } if ( tuple == null ) { diff --git a/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java b/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java index b1ee74cf1c..2be27840ce 100644 --- a/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java +++ b/hibernate-core/src/main/java/org/hibernate/cache/spi/UpdateTimestampsCache.java @@ -26,7 +26,6 @@ package org.hibernate.cache.spi; import java.io.Serializable; import java.util.Properties; import java.util.Set; -import java.util.concurrent.locks.ReentrantReadWriteLock; import org.jboss.logging.Logger; @@ -51,100 +50,96 @@ public class UpdateTimestampsCache { public static final String REGION_NAME = UpdateTimestampsCache.class.getName(); private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, 
UpdateTimestampsCache.class.getName() ); - private ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock(); - private final TimestampsRegion region; private final SessionFactoryImplementor factory; + private final TimestampsRegion region; public UpdateTimestampsCache(Settings settings, Properties props, final SessionFactoryImplementor factory) throws HibernateException { this.factory = factory; - String prefix = settings.getCacheRegionPrefix(); - String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME; + final String prefix = settings.getCacheRegionPrefix(); + final String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME; + LOG.startingUpdateTimestampsCache( regionName ); this.region = factory.getServiceRegistry().getService( RegionFactory.class ).buildTimestampsRegion( regionName, props ); } + @SuppressWarnings({"UnusedDeclaration"}) - public UpdateTimestampsCache(Settings settings, Properties props) - throws HibernateException { - this(settings, props, null); + public UpdateTimestampsCache(Settings settings, Properties props) throws HibernateException { + this( settings, props, null ); } @SuppressWarnings({"UnnecessaryBoxing"}) public void preinvalidate(Serializable[] spaces) throws CacheException { - readWriteLock.writeLock().lock(); + final boolean debug = LOG.isDebugEnabled(); + final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled(); - try { - Long ts = region.nextTimestamp() + region.getTimeout(); - for ( Serializable space : spaces ) { + final Long ts = region.nextTimestamp() + region.getTimeout(); + + for ( Serializable space : spaces ) { + if ( debug ) { LOG.debugf( "Pre-invalidating space [%s], timestamp: %s", space, ts ); - //put() has nowait semantics, is this really appropriate? - //note that it needs to be async replication, never local or sync - region.put( space, ts ); - if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) { - factory.getStatisticsImplementor().updateTimestampsCachePut(); - } } - } - finally { - readWriteLock.writeLock().unlock(); + //put() has nowait semantics, is this really appropriate? + //note that it needs to be async replication, never local or sync + region.put( space, ts ); + if ( stats ) { + factory.getStatisticsImplementor().updateTimestampsCachePut(); + } } } - @SuppressWarnings({"UnnecessaryBoxing"}) + @SuppressWarnings({"UnnecessaryBoxing"}) public void invalidate(Serializable[] spaces) throws CacheException { - readWriteLock.writeLock().lock(); + final boolean debug = LOG.isDebugEnabled(); + final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled(); - try { - Long ts = region.nextTimestamp(); - for (Serializable space : spaces) { + final Long ts = region.nextTimestamp(); + + for (Serializable space : spaces) { + if ( debug ) { LOG.debugf( "Invalidating space [%s], timestamp: %s", space, ts ); - //put() has nowait semantics, is this really appropriate? - //note that it needs to be async replication, never local or sync - region.put( space, ts ); - if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) { - factory.getStatisticsImplementor().updateTimestampsCachePut(); - } } - } - finally { - readWriteLock.writeLock().unlock(); + //put() has nowait semantics, is this really appropriate? 
+ //note that it needs to be async replication, never local or sync + region.put( space, ts ); + if ( stats ) { + factory.getStatisticsImplementor().updateTimestampsCachePut(); + } } } @SuppressWarnings({"unchecked", "UnnecessaryUnboxing"}) public boolean isUpToDate(Set spaces, Long timestamp) throws HibernateException { - readWriteLock.readLock().lock(); + final boolean debug = LOG.isDebugEnabled(); + final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled(); - try { - for ( Serializable space : (Set) spaces ) { - Long lastUpdate = (Long) region.get( space ); - if ( lastUpdate == null ) { - if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) { - factory.getStatisticsImplementor().updateTimestampsCacheMiss(); - } - //the last update timestamp was lost from the cache - //(or there were no updates since startup!) - //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) ); - //result = false; // safer + for ( Serializable space : (Set) spaces ) { + Long lastUpdate = (Long) region.get( space ); + if ( lastUpdate == null ) { + if ( stats ) { + factory.getStatisticsImplementor().updateTimestampsCacheMiss(); } - else { - if ( LOG.isDebugEnabled() ) { - LOG.debugf( - "[%s] last update timestamp: %s", - space, - lastUpdate + ", result set timestamp: " + timestamp - ); - } - if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) { - factory.getStatisticsImplementor().updateTimestampsCacheHit(); - } - if ( lastUpdate >= timestamp ) return false; + //the last update timestamp was lost from the cache + //(or there were no updates since startup!) + //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) ); + //result = false; // safer + } + else { + if ( debug ) { + LOG.debugf( + "[%s] last update timestamp: %s", + space, + lastUpdate + ", result set timestamp: " + timestamp + ); + } + if ( stats ) { + factory.getStatisticsImplementor().updateTimestampsCacheHit(); + } + if ( lastUpdate >= timestamp ) { + return false; } } - return true; - } - finally { - readWriteLock.readLock().unlock(); } + return true; } public void clear() throws CacheException { diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java b/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java index 7d5b8848fe..d71d65d421 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/AvailableSettings.java @@ -269,7 +269,7 @@ public interface AvailableSettings { public static final String CURRENT_SESSION_CONTEXT_CLASS = "hibernate.current_session_context_class"; /** - * Names the implementation of {@link org.hibernate.engine.transaction.spi.TransactionContext} to use for + * Names the implementation of {@link org.hibernate.engine.transaction.spi.TransactionFactory} to use for * creating {@link org.hibernate.Transaction} instances */ public static final String TRANSACTION_STRATEGY = "hibernate.transaction.factory_class"; @@ -643,4 +643,13 @@ public interface AvailableSettings { // todo : add to Environment String SCHEMA_NAME_RESOLVER = "hibernate.schema_name_resolver"; public static final String ENABLE_LAZY_LOAD_NO_TRANS = "hibernate.enable_lazy_load_no_trans"; + + public static final String HQL_BULK_ID_STRATEGY = "hibernate.hql.bulk_id_strategy"; + + /** + * Names the {@link org.hibernate.loader.BatchFetchStyle} to use. 
Can specify either the + * {@link org.hibernate.loader.BatchFetchStyle} name (insensitively), or a + * {@link org.hibernate.loader.BatchFetchStyle} instance. + */ + public static final String BATCH_FETCH_STYLE = "hibernate.batch_fetch_style"; } diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java b/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java index 5163259131..e361467be3 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java @@ -2418,7 +2418,9 @@ public class Configuration implements Serializable { } public void addSqlFunction(String functionName, SQLFunction function) { - sqlFunctions.put( functionName, function ); + // HHH-7721: SQLFunctionRegistry expects all lowercase. Enforce, + // just in case a user's customer dialect uses mixed cases. + sqlFunctions.put( functionName.toLowerCase(), function ); } public TypeResolver getTypeResolver() { diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java b/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java index d687f9e846..32bc128277 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/Settings.java @@ -29,8 +29,10 @@ import org.hibernate.ConnectionReleaseMode; import org.hibernate.EntityMode; import org.hibernate.MultiTenancyStrategy; import org.hibernate.cache.spi.QueryCacheFactory; -import org.hibernate.hql.spi.QueryTranslatorFactory; import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; +import org.hibernate.hql.spi.MultiTableBulkIdStrategy; +import org.hibernate.hql.spi.QueryTranslatorFactory; +import org.hibernate.loader.BatchFetchStyle; import org.hibernate.tuple.entity.EntityTuplizerFactory; /** @@ -77,6 +79,7 @@ public final class Settings { private boolean namedQueryStartupCheckingEnabled; private EntityTuplizerFactory entityTuplizerFactory; private boolean checkNullability; + private boolean initializeLazyStateOutsideTransactions; // private ComponentTuplizerFactory componentTuplizerFactory; todo : HHH-3517 and HHH-1907 // private BytecodeProvider bytecodeProvider; private String importFiles; @@ -84,6 +87,10 @@ public final class Settings { private JtaPlatform jtaPlatform; + private MultiTableBulkIdStrategy multiTableBulkIdStrategy; + private BatchFetchStyle batchFetchStyle; + + /** * Package protected constructor */ @@ -411,4 +418,28 @@ public final class Settings { void setMultiTenancyStrategy(MultiTenancyStrategy multiTenancyStrategy) { this.multiTenancyStrategy = multiTenancyStrategy; } + + public boolean isInitializeLazyStateOutsideTransactionsEnabled() { + return initializeLazyStateOutsideTransactions; + } + + void setInitializeLazyStateOutsideTransactions(boolean initializeLazyStateOutsideTransactions) { + this.initializeLazyStateOutsideTransactions = initializeLazyStateOutsideTransactions; + } + + public MultiTableBulkIdStrategy getMultiTableBulkIdStrategy() { + return multiTableBulkIdStrategy; + } + + void setMultiTableBulkIdStrategy(MultiTableBulkIdStrategy multiTableBulkIdStrategy) { + this.multiTableBulkIdStrategy = multiTableBulkIdStrategy; + } + + public BatchFetchStyle getBatchFetchStyle() { + return batchFetchStyle; + } + + void setBatchFetchStyle(BatchFetchStyle batchFetchStyle) { + this.batchFetchStyle = batchFetchStyle; + } } diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java 
b/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java index 7926e47d60..173733ba4a 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/SettingsFactory.java @@ -27,30 +27,34 @@ import java.io.Serializable; import java.util.Map; import java.util.Properties; -import org.jboss.logging.Logger; - import org.hibernate.ConnectionReleaseMode; import org.hibernate.EntityMode; import org.hibernate.HibernateException; import org.hibernate.MultiTenancyStrategy; +import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; +import org.hibernate.boot.registry.selector.spi.StrategySelector; import org.hibernate.cache.internal.NoCachingRegionFactory; import org.hibernate.cache.internal.RegionFactoryInitiator; import org.hibernate.cache.internal.StandardQueryCacheFactory; import org.hibernate.cache.spi.QueryCacheFactory; import org.hibernate.cache.spi.RegionFactory; +import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; +import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider; +import org.hibernate.engine.jdbc.env.spi.ExtractedDatabaseMetaData; import org.hibernate.engine.jdbc.spi.JdbcServices; +import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; import org.hibernate.engine.transaction.spi.TransactionFactory; +import org.hibernate.hql.spi.MultiTableBulkIdStrategy; +import org.hibernate.hql.spi.PersistentTableBulkIdStrategy; import org.hibernate.hql.spi.QueryTranslatorFactory; +import org.hibernate.hql.spi.TemporaryTableBulkIdStrategy; import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.config.ConfigurationHelper; +import org.hibernate.loader.BatchFetchStyle; import org.hibernate.service.ServiceRegistry; -import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; -import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; -import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider; -import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; -import org.hibernate.engine.jdbc.env.spi.ExtractedDatabaseMetaData; import org.hibernate.tuple.entity.EntityTuplizerFactory; +import org.jboss.logging.Logger; /** * Reads configuration properties and builds a {@link Settings} instance. @@ -75,7 +79,7 @@ public class SettingsFactory implements Serializable { //SessionFactory name: - String sessionFactoryName = props.getProperty( Environment.SESSION_FACTORY_NAME ); + String sessionFactoryName = props.getProperty( AvailableSettings.SESSION_FACTORY_NAME ); settings.setSessionFactoryName( sessionFactoryName ); settings.setSessionFactoryNameAlsoJndiName( ConfigurationHelper.getBoolean( AvailableSettings.SESSION_FACTORY_NAME_IS_JNDI, props, true ) @@ -97,13 +101,25 @@ public class SettingsFactory implements Serializable { // Transaction settings: settings.setJtaPlatform( serviceRegistry.getService( JtaPlatform.class ) ); - boolean flushBeforeCompletion = ConfigurationHelper.getBoolean(Environment.FLUSH_BEFORE_COMPLETION, properties); + MultiTableBulkIdStrategy multiTableBulkIdStrategy = serviceRegistry.getService( StrategySelector.class ) + .resolveStrategy( + MultiTableBulkIdStrategy.class, + properties.getProperty( AvailableSettings.HQL_BULK_ID_STRATEGY ) + ); + if ( multiTableBulkIdStrategy == null ) { + multiTableBulkIdStrategy = jdbcServices.getDialect().supportsTemporaryTables() + ? 
TemporaryTableBulkIdStrategy.INSTANCE + : new PersistentTableBulkIdStrategy(); + } + settings.setMultiTableBulkIdStrategy( multiTableBulkIdStrategy ); + + boolean flushBeforeCompletion = ConfigurationHelper.getBoolean(AvailableSettings.FLUSH_BEFORE_COMPLETION, properties); if ( debugEnabled ) { LOG.debugf( "Automatic flush during beforeCompletion(): %s", enabledDisabled(flushBeforeCompletion) ); } settings.setFlushBeforeCompletionEnabled(flushBeforeCompletion); - boolean autoCloseSession = ConfigurationHelper.getBoolean(Environment.AUTO_CLOSE_SESSION, properties); + boolean autoCloseSession = ConfigurationHelper.getBoolean(AvailableSettings.AUTO_CLOSE_SESSION, properties); if ( debugEnabled ) { LOG.debugf( "Automatic session close at end of transaction: %s", enabledDisabled(autoCloseSession) ); } @@ -111,7 +127,7 @@ public class SettingsFactory implements Serializable { //JDBC and connection settings: - int batchSize = ConfigurationHelper.getInt(Environment.STATEMENT_BATCH_SIZE, properties, 0); + int batchSize = ConfigurationHelper.getInt(AvailableSettings.STATEMENT_BATCH_SIZE, properties, 0); if ( !meta.supportsBatchUpdates() ) { batchSize = 0; } @@ -120,14 +136,14 @@ public class SettingsFactory implements Serializable { } settings.setJdbcBatchSize(batchSize); - boolean jdbcBatchVersionedData = ConfigurationHelper.getBoolean(Environment.BATCH_VERSIONED_DATA, properties, false); + boolean jdbcBatchVersionedData = ConfigurationHelper.getBoolean(AvailableSettings.BATCH_VERSIONED_DATA, properties, false); if ( batchSize > 0 && debugEnabled ) { LOG.debugf( "JDBC batch updates for versioned data: %s", enabledDisabled(jdbcBatchVersionedData) ); } settings.setJdbcBatchVersionedData(jdbcBatchVersionedData); boolean useScrollableResultSets = ConfigurationHelper.getBoolean( - Environment.USE_SCROLLABLE_RESULTSET, + AvailableSettings.USE_SCROLLABLE_RESULTSET, properties, meta.supportsScrollableResults() ); @@ -136,19 +152,19 @@ public class SettingsFactory implements Serializable { } settings.setScrollableResultSetsEnabled(useScrollableResultSets); - boolean wrapResultSets = ConfigurationHelper.getBoolean(Environment.WRAP_RESULT_SETS, properties, false); + boolean wrapResultSets = ConfigurationHelper.getBoolean(AvailableSettings.WRAP_RESULT_SETS, properties, false); if ( debugEnabled ) { LOG.debugf( "Wrap result sets: %s", enabledDisabled(wrapResultSets) ); } settings.setWrapResultSetsEnabled(wrapResultSets); - boolean useGetGeneratedKeys = ConfigurationHelper.getBoolean(Environment.USE_GET_GENERATED_KEYS, properties, meta.supportsGetGeneratedKeys()); + boolean useGetGeneratedKeys = ConfigurationHelper.getBoolean(AvailableSettings.USE_GET_GENERATED_KEYS, properties, meta.supportsGetGeneratedKeys()); if ( debugEnabled ) { LOG.debugf( "JDBC3 getGeneratedKeys(): %s", enabledDisabled(useGetGeneratedKeys) ); } settings.setGetGeneratedKeysEnabled(useGetGeneratedKeys); - Integer statementFetchSize = ConfigurationHelper.getInteger(Environment.STATEMENT_FETCH_SIZE, properties); + Integer statementFetchSize = ConfigurationHelper.getInteger(AvailableSettings.STATEMENT_FETCH_SIZE, properties); if ( statementFetchSize != null && debugEnabled ) { LOG.debugf( "JDBC result set fetch size: %s", statementFetchSize ); } @@ -160,7 +176,7 @@ public class SettingsFactory implements Serializable { } settings.setMultiTenancyStrategy( multiTenancyStrategy ); - String releaseModeName = ConfigurationHelper.getString( Environment.RELEASE_CONNECTIONS, properties, "auto" ); + String releaseModeName = 
ConfigurationHelper.getString( AvailableSettings.RELEASE_CONNECTIONS, properties, "auto" ); if ( debugEnabled ) { LOG.debugf( "Connection release mode: %s", releaseModeName ); } @@ -183,10 +199,15 @@ public class SettingsFactory implements Serializable { } settings.setConnectionReleaseMode( releaseMode ); + final BatchFetchStyle batchFetchStyle = BatchFetchStyle.interpret( properties.get( AvailableSettings.BATCH_FETCH_STYLE ) ); + LOG.debugf( "Using BatchFetchStyle : " + batchFetchStyle.name() ); + settings.setBatchFetchStyle( batchFetchStyle ); + + //SQL Generation settings: - String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA ); - String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG ); + String defaultSchema = properties.getProperty( AvailableSettings.DEFAULT_SCHEMA ); + String defaultCatalog = properties.getProperty( AvailableSettings.DEFAULT_CATALOG ); if ( defaultSchema != null && debugEnabled ) { LOG.debugf( "Default schema: %s", defaultSchema ); } @@ -196,31 +217,31 @@ public class SettingsFactory implements Serializable { settings.setDefaultSchemaName( defaultSchema ); settings.setDefaultCatalogName( defaultCatalog ); - Integer maxFetchDepth = ConfigurationHelper.getInteger( Environment.MAX_FETCH_DEPTH, properties ); + Integer maxFetchDepth = ConfigurationHelper.getInteger( AvailableSettings.MAX_FETCH_DEPTH, properties ); if ( maxFetchDepth != null ) { LOG.debugf( "Maximum outer join fetch depth: %s", maxFetchDepth ); } settings.setMaximumFetchDepth( maxFetchDepth ); - int batchFetchSize = ConfigurationHelper.getInt(Environment.DEFAULT_BATCH_FETCH_SIZE, properties, 1); + int batchFetchSize = ConfigurationHelper.getInt(AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, properties, 1); if ( debugEnabled ) { LOG.debugf( "Default batch fetch size: %s", batchFetchSize ); } settings.setDefaultBatchFetchSize( batchFetchSize ); - boolean comments = ConfigurationHelper.getBoolean( Environment.USE_SQL_COMMENTS, properties ); + boolean comments = ConfigurationHelper.getBoolean( AvailableSettings.USE_SQL_COMMENTS, properties ); if ( debugEnabled ) { LOG.debugf( "Generate SQL with comments: %s", enabledDisabled(comments) ); } settings.setCommentsEnabled( comments ); - boolean orderUpdates = ConfigurationHelper.getBoolean( Environment.ORDER_UPDATES, properties ); + boolean orderUpdates = ConfigurationHelper.getBoolean( AvailableSettings.ORDER_UPDATES, properties ); if ( debugEnabled ) { LOG.debugf( "Order SQL updates by primary key: %s", enabledDisabled(orderUpdates) ); } settings.setOrderUpdatesEnabled( orderUpdates ); - boolean orderInserts = ConfigurationHelper.getBoolean(Environment.ORDER_INSERTS, properties); + boolean orderInserts = ConfigurationHelper.getBoolean(AvailableSettings.ORDER_INSERTS, properties); if ( debugEnabled ) { LOG.debugf( "Order SQL inserts for batching: %s", enabledDisabled(orderInserts) ); } @@ -230,13 +251,13 @@ public class SettingsFactory implements Serializable { settings.setQueryTranslatorFactory( createQueryTranslatorFactory( properties, serviceRegistry ) ); - Map querySubstitutions = ConfigurationHelper.toMap( Environment.QUERY_SUBSTITUTIONS, " ,=;:\n\t\r\f", properties ); + Map querySubstitutions = ConfigurationHelper.toMap( AvailableSettings.QUERY_SUBSTITUTIONS, " ,=;:\n\t\r\f", properties ); if ( debugEnabled ) { LOG.debugf( "Query language substitutions: %s", querySubstitutions ); } settings.setQuerySubstitutions( querySubstitutions ); - boolean jpaqlCompliance = ConfigurationHelper.getBoolean( 
Environment.JPAQL_STRICT_COMPLIANCE, properties, false ); + boolean jpaqlCompliance = ConfigurationHelper.getBoolean( AvailableSettings.JPAQL_STRICT_COMPLIANCE, properties, false ); if ( debugEnabled ) { LOG.debugf( "JPA-QL strict compliance: %s", enabledDisabled(jpaqlCompliance) ); } @@ -244,13 +265,13 @@ public class SettingsFactory implements Serializable { // Second-level / query cache: - boolean useSecondLevelCache = ConfigurationHelper.getBoolean( Environment.USE_SECOND_LEVEL_CACHE, properties, true ); + boolean useSecondLevelCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_SECOND_LEVEL_CACHE, properties, true ); if ( debugEnabled ) { LOG.debugf( "Second-level cache: %s", enabledDisabled(useSecondLevelCache) ); } settings.setSecondLevelCacheEnabled( useSecondLevelCache ); - boolean useQueryCache = ConfigurationHelper.getBoolean(Environment.USE_QUERY_CACHE, properties); + boolean useQueryCache = ConfigurationHelper.getBoolean(AvailableSettings.USE_QUERY_CACHE, properties); if ( debugEnabled ) { LOG.debugf( "Query cache: %s", enabledDisabled(useQueryCache) ); } @@ -268,13 +289,13 @@ public class SettingsFactory implements Serializable { } settings.setCacheRegionPrefix( prefix ); - boolean useStructuredCacheEntries = ConfigurationHelper.getBoolean( Environment.USE_STRUCTURED_CACHE, properties, false ); + boolean useStructuredCacheEntries = ConfigurationHelper.getBoolean( AvailableSettings.USE_STRUCTURED_CACHE, properties, false ); if ( debugEnabled ) { LOG.debugf( "Structured second-level cache entries: %s", enabledDisabled(useStructuredCacheEntries) ); } settings.setStructuredCacheEntriesEnabled( useStructuredCacheEntries ); - boolean useIdentifierRollback = ConfigurationHelper.getBoolean( Environment.USE_IDENTIFIER_ROLLBACK, properties ); + boolean useIdentifierRollback = ConfigurationHelper.getBoolean( AvailableSettings.USE_IDENTIFIER_ROLLBACK, properties ); if ( debugEnabled ) { LOG.debugf( "Deleted entity synthetic identifier rollback: %s", enabledDisabled(useIdentifierRollback) ); } @@ -282,7 +303,7 @@ public class SettingsFactory implements Serializable { //Schema export: - String autoSchemaExport = properties.getProperty( Environment.HBM2DDL_AUTO ); + String autoSchemaExport = properties.getProperty( AvailableSettings.HBM2DDL_AUTO ); if ( "validate".equals(autoSchemaExport) ) { settings.setAutoValidateSchema( true ); } @@ -296,21 +317,21 @@ public class SettingsFactory implements Serializable { settings.setAutoCreateSchema( true ); settings.setAutoDropSchema( true ); } - settings.setImportFiles( properties.getProperty( Environment.HBM2DDL_IMPORT_FILES ) ); + settings.setImportFiles( properties.getProperty( AvailableSettings.HBM2DDL_IMPORT_FILES ) ); - EntityMode defaultEntityMode = EntityMode.parse( properties.getProperty( Environment.DEFAULT_ENTITY_MODE ) ); + EntityMode defaultEntityMode = EntityMode.parse( properties.getProperty( AvailableSettings.DEFAULT_ENTITY_MODE ) ); if ( debugEnabled ) { LOG.debugf( "Default entity-mode: %s", defaultEntityMode ); } settings.setDefaultEntityMode( defaultEntityMode ); - boolean namedQueryChecking = ConfigurationHelper.getBoolean( Environment.QUERY_STARTUP_CHECKING, properties, true ); + boolean namedQueryChecking = ConfigurationHelper.getBoolean( AvailableSettings.QUERY_STARTUP_CHECKING, properties, true ); if ( debugEnabled ) { LOG.debugf( "Named query checking : %s", enabledDisabled(namedQueryChecking) ); } settings.setNamedQueryStartupCheckingEnabled( namedQueryChecking ); - boolean checkNullability = 
ConfigurationHelper.getBoolean(Environment.CHECK_NULLABILITY, properties, true); + boolean checkNullability = ConfigurationHelper.getBoolean(AvailableSettings.CHECK_NULLABILITY, properties, true); if ( debugEnabled ) { LOG.debugf( "Check Nullability in Core (should be disabled when Bean Validation is on): %s", enabledDisabled(checkNullability) ); } @@ -319,11 +340,21 @@ public class SettingsFactory implements Serializable { // TODO: Does EntityTuplizerFactory really need to be configurable? revisit for HHH-6383 settings.setEntityTuplizerFactory( new EntityTuplizerFactory() ); -// String provider = properties.getProperty( Environment.BYTECODE_PROVIDER ); +// String provider = properties.getProperty( AvailableSettings.BYTECODE_PROVIDER ); // log.info( "Bytecode provider name : " + provider ); // BytecodeProvider bytecodeProvider = buildBytecodeProvider( provider ); // settings.setBytecodeProvider( bytecodeProvider ); + boolean initializeLazyStateOutsideTransactionsEnabled = ConfigurationHelper.getBoolean( + AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS, + properties, + false + ); + if ( debugEnabled ) { + LOG.debugf( "Allow initialization of lazy state outside session : %s", enabledDisabled( initializeLazyStateOutsideTransactionsEnabled ) ); + } + settings.setInitializeLazyStateOutsideTransactions( initializeLazyStateOutsideTransactionsEnabled ); + return settings; } @@ -344,7 +375,7 @@ public class SettingsFactory implements Serializable { protected QueryCacheFactory createQueryCacheFactory(Properties properties, ServiceRegistry serviceRegistry) { String queryCacheFactoryClassName = ConfigurationHelper.getString( - Environment.QUERY_CACHE_FACTORY, properties, StandardQueryCacheFactory.class.getName() + AvailableSettings.QUERY_CACHE_FACTORY, properties, StandardQueryCacheFactory.class.getName() ); LOG.debugf( "Query cache factory: %s", queryCacheFactoryClassName ); try { @@ -362,7 +393,7 @@ public class SettingsFactory implements Serializable { // todo : REMOVE!
THIS IS TOTALLY A TEMPORARY HACK FOR org.hibernate.cfg.AnnotationBinder which will be going away String regionFactoryClassName = RegionFactoryInitiator.mapLegacyNames( ConfigurationHelper.getString( - Environment.CACHE_REGION_FACTORY, properties, null + AvailableSettings.CACHE_REGION_FACTORY, properties, null ) ); if ( regionFactoryClassName == null ) { @@ -392,7 +423,7 @@ public class SettingsFactory implements Serializable { protected QueryTranslatorFactory createQueryTranslatorFactory(Properties properties, ServiceRegistry serviceRegistry) { String className = ConfigurationHelper.getString( - Environment.QUERY_TRANSLATOR, properties, "org.hibernate.hql.internal.ast.ASTQueryTranslatorFactory" + AvailableSettings.QUERY_TRANSLATOR, properties, "org.hibernate.hql.internal.ast.ASTQueryTranslatorFactory" ); LOG.debugf( "Query translator: %s", className ); try { diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java index e0d64516fc..7d8b4bad2d 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/CollectionBinder.java @@ -795,7 +795,7 @@ public abstract class CollectionBinder { String entityName = oneToMany.getReferencedEntityName(); PersistentClass referenced = mappings.getClass( entityName ); Backref prop = new Backref(); - prop.setName( '_' + fkJoinColumns[0].getPropertyName() + "Backref" ); + prop.setName( '_' + fkJoinColumns[0].getPropertyName() + '_' + fkJoinColumns[0].getLogicalColumnName() + "Backref" ); prop.setUpdateable( false ); prop.setSelectable( false ); prop.setCollectionRole( collection.getRole() ); diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java index 27e9235a89..0aa513d6b3 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/PropertyBinder.java @@ -24,10 +24,10 @@ package org.hibernate.cfg.annotations; import java.util.Map; + import javax.persistence.EmbeddedId; import javax.persistence.Id; - -import org.jboss.logging.Logger; +import javax.persistence.Lob; import org.hibernate.AnnotationException; import org.hibernate.annotations.Generated; @@ -57,6 +57,7 @@ import org.hibernate.mapping.RootClass; import org.hibernate.mapping.SimpleValue; import org.hibernate.mapping.ToOne; import org.hibernate.mapping.Value; +import org.jboss.logging.Logger; /** * @author Emmanuel Bernard @@ -264,6 +265,7 @@ public class PropertyBinder { prop.setLazy( lazy ); prop.setCascade( cascade ); prop.setPropertyAccessorName( accessType.getType() ); + Generated ann = property != null ? property.getAnnotation( Generated.class ) : null; @@ -286,6 +288,7 @@ public class PropertyBinder { prop.setGeneration( PropertyGeneration.parse( generated.toString().toLowerCase() ) ); } } + NaturalId naturalId = property != null ? property.getAnnotation( NaturalId.class ) : null; if ( naturalId != null ) { if ( ! entityBinder.isRootEntity() ) { @@ -296,6 +299,11 @@ public class PropertyBinder { } prop.setNaturalIdentifier( true ); } + + // HHH-4635 -- needed for dialect-specific property ordering + Lob lob = property != null ? 
property.getAnnotation( Lob.class ) : null; + prop.setLob( lob != null ); + prop.setInsertable( insertable ); prop.setUpdateable( updatable ); diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java index e8c46256ef..d4b77da49a 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/annotations/SimpleValueBinder.java @@ -28,6 +28,7 @@ import java.lang.reflect.TypeVariable; import java.util.Calendar; import java.util.Date; import java.util.Properties; + import javax.persistence.AttributeConverter; import javax.persistence.Convert; import javax.persistence.Converts; @@ -227,7 +228,6 @@ public class SimpleValueBinder { .toXClass( Serializable.class ) .isAssignableFrom( returnedClassOrElement ) ) { type = SerializableToBlobType.class.getName(); - //typeParameters = new Properties(); typeParameters.setProperty( SerializableToBlobType.CLASS_NAME, returnedClassOrElement.getName() @@ -618,6 +618,7 @@ public class SimpleValueBinder { parameters.put( DynamicParameterizedType.IS_PRIMARY_KEY, Boolean.toString( key ) ); parameters.put( DynamicParameterizedType.ENTITY, persistentClassName ); + parameters.put( DynamicParameterizedType.XPROPERTY, xproperty ); parameters.put( DynamicParameterizedType.PROPERTY, xproperty.getName() ); parameters.put( DynamicParameterizedType.ACCESS_TYPE, accessType.getType() ); simpleValue.setTypeParameters( parameters ); diff --git a/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java b/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java index 2038ef3d2e..05ce17b45e 100644 --- a/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java +++ b/hibernate-core/src/main/java/org/hibernate/collection/internal/AbstractPersistentCollection.java @@ -33,13 +33,12 @@ import java.util.List; import java.util.ListIterator; import javax.naming.NamingException; -import org.jboss.logging.Logger; +import javax.naming.NamingException; import org.hibernate.AssertionFailure; import org.hibernate.HibernateException; import org.hibernate.LazyInitializationException; import org.hibernate.Session; -import org.hibernate.cfg.AvailableSettings; import org.hibernate.collection.spi.PersistentCollection; import org.hibernate.engine.internal.ForeignKeys; import org.hibernate.engine.spi.CollectionEntry; @@ -56,6 +55,7 @@ import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.entity.EntityPersister; import org.hibernate.pretty.MessageHelper; import org.hibernate.type.Type; +import org.jboss.logging.Logger; /** * Base class implementing {@link org.hibernate.collection.spi.PersistentCollection} @@ -140,16 +140,22 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers @Override public Boolean doWork() { CollectionEntry entry = session.getPersistenceContext().getCollectionEntry( AbstractPersistentCollection.this ); - CollectionPersister persister = entry.getLoadedPersister(); - if ( persister.isExtraLazy() ) { - if ( hasQueuedOperations() ) { - session.flush(); + + if ( entry != null ) { + CollectionPersister persister = entry.getLoadedPersister(); + if ( persister.isExtraLazy() ) { + if ( hasQueuedOperations() ) { + session.flush(); + } + cachedSize = persister.getSize( entry.getLoadedKey(), session ); + 
return true; + } + else { + read(); } - cachedSize = persister.getSize( entry.getLoadedKey(), session ); - return true; } - else { - read(); + else{ + throwLazyInitializationExceptionIfNotConnected(); } return false; } @@ -170,6 +176,7 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers private T withTemporarySessionIfNeeded(LazyInitializationWork lazyInitializationWork) { SessionImplementor originalSession = null; boolean isTempSession = false; + boolean isJTA = false; if ( session == null ) { if ( specjLazyLoad ) { @@ -202,6 +209,22 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers } if ( isTempSession ) { + // TODO: On the next major release, add an + // 'isJTA' or 'getTransactionFactory' method to Session. + isJTA = session.getTransactionCoordinator() + .getTransactionContext().getTransactionEnvironment() + .getTransactionFactory() + .compatibleWithJtaSynchronization(); + + if ( !isJTA ) { + // Explicitly handle the transactions only if we're not in + // a JTA environment. A lazy loading temporary session can + // be created even if a current session and transaction are + // open (ex: session.clear() was used). We must prevent + // multiple transactions. + ( ( Session) session ).beginTransaction(); + } + session.getPersistenceContext().addUninitializedDetachedCollection( session.getFactory().getCollectionPersister( getRole() ), this @@ -215,6 +238,9 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers if ( isTempSession ) { // make sure the just opened temp session gets closed! try { + if ( !isJTA ) { + ( ( Session) session ).getTransaction().commit(); + } ( (Session) session ).close(); } catch (Exception e) { @@ -580,11 +606,7 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers protected void prepareForPossibleSpecialSpecjInitialization() { if ( session != null ) { - specjLazyLoad = Boolean.parseBoolean( - session.getFactory() - .getProperties() - .getProperty( AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS ) - ); + specjLazyLoad = session.getFactory().getSettings().isInitializeLazyStateOutsideTransactionsEnabled(); if ( specjLazyLoad && sessionFactoryUuid == null ) { try { @@ -622,9 +644,8 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers throw new HibernateException( "Illegal attempt to associate a collection with two open sessions: " + MessageHelper.collectionInfoString( - ce.getLoadedPersister(), - ce.getLoadedKey(), - session.getFactory() + ce.getLoadedPersister(), this, + ce.getLoadedKey(), session ) ); } diff --git a/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java b/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java index 02ea3a6f4e..3419e783c6 100644 --- a/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java +++ b/hibernate-core/src/main/java/org/hibernate/collection/internal/PersistentMap.java @@ -296,6 +296,7 @@ public class PersistentMap extends AbstractPersistentCollection implements Map { for ( Object[] entry : loadingEntries ) { map.put( entry[0], entry[1] ); } + loadingEntries = null; } return super.endRead(); } diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java index c9b4291dd1..85aacdef71 100755 --- a/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java +++ 
b/hibernate-core/src/main/java/org/hibernate/dialect/CUBRIDDialect.java @@ -31,6 +31,9 @@ import org.hibernate.cfg.Environment; import org.hibernate.dialect.function.NoArgSQLFunction; import org.hibernate.dialect.function.StandardSQLFunction; import org.hibernate.dialect.function.VarArgsSQLFunction; +import org.hibernate.dialect.pagination.LimitHandler; +import org.hibernate.dialect.pagination.CUBRIDLimitHandler; +import org.hibernate.engine.spi.RowSelection; import org.hibernate.type.StandardBasicTypes; /** @@ -39,94 +42,220 @@ import org.hibernate.type.StandardBasicTypes; * @author Seok Jeong Il */ public class CUBRIDDialect extends Dialect { - @Override - protected String getIdentityColumnString() throws MappingException { - return "auto_increment"; //starts with 1, implicitly - } - - @Override - public String getIdentitySelectString(String table, String column, int type) - throws MappingException { - // CUBRID 8.4.0 support last_insert_id() - // return "select last_insert_id()"; - return "select current_val from db_serial where name = '" + ( table + "_ai_" + column ).toLowerCase() + "'"; - } - public CUBRIDDialect() { super(); - registerColumnType( Types.BIT, "bit(8)" ); - registerColumnType( Types.BIGINT, "numeric(19,0)" ); + registerColumnType( Types.BIGINT, "bigint" ); + registerColumnType( Types.BIT, "bit(8)" ); + registerColumnType( Types.BLOB, "bit varying(65535)" ); + registerColumnType( Types.BOOLEAN, "bit(8)"); + registerColumnType( Types.CHAR, "char(1)" ); + registerColumnType( Types.CLOB, "string" ); + registerColumnType( Types.DATE, "date" ); + registerColumnType( Types.DECIMAL, "decimal" ); + registerColumnType( Types.DOUBLE, "double" ); + registerColumnType( Types.FLOAT, "float" ); + registerColumnType( Types.INTEGER, "int" ); + registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); + registerColumnType( Types.REAL, "double" ); registerColumnType( Types.SMALLINT, "short" ); - registerColumnType( Types.TINYINT, "short" ); - registerColumnType( Types.INTEGER, "integer" ); - registerColumnType( Types.CHAR, "char(1)" ); - registerColumnType( Types.VARCHAR, 4000, "varchar($l)" ); - registerColumnType( Types.FLOAT, "float" ); - registerColumnType( Types.DOUBLE, "double" ); - registerColumnType( Types.DATE, "date" ); - registerColumnType( Types.TIME, "time" ); + registerColumnType( Types.TIME, "time" ); registerColumnType( Types.TIMESTAMP, "timestamp" ); - registerColumnType( Types.VARBINARY, 2000, "bit varying($l)" ); - registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); - registerColumnType( Types.BLOB, "blob" ); - registerColumnType( Types.CLOB, "string" ); + registerColumnType( Types.TINYINT, "short" ); + registerColumnType( Types.VARBINARY, 2000, "bit varying($l)" ); + registerColumnType( Types.VARCHAR, "string" ); + registerColumnType( Types.VARCHAR, 2000, "varchar($l)" ); + registerColumnType( Types.VARCHAR, 255, "varchar($l)" ); - getDefaultProperties().setProperty( Environment.USE_STREAMS_FOR_BINARY, "true" ); - getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); + getDefaultProperties().setProperty(Environment.USE_STREAMS_FOR_BINARY, "true"); + getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE); - registerFunction( "substring", new StandardSQLFunction( "substr", StandardBasicTypes.STRING ) ); - registerFunction( "trim", new StandardSQLFunction( "trim" ) ); - registerFunction( "length", new StandardSQLFunction( "length", StandardBasicTypes.INTEGER ) ); - registerFunction( "bit_length", new 
StandardSQLFunction( "bit_length", StandardBasicTypes.INTEGER ) ); - registerFunction( "coalesce", new StandardSQLFunction( "coalesce" ) ); - registerFunction( "nullif", new StandardSQLFunction( "nullif" ) ); - registerFunction( "abs", new StandardSQLFunction( "abs" ) ); - registerFunction( "mod", new StandardSQLFunction( "mod" ) ); - registerFunction( "upper", new StandardSQLFunction( "upper" ) ); - registerFunction( "lower", new StandardSQLFunction( "lower" ) ); + registerFunction("ascii", new StandardSQLFunction("ascii", StandardBasicTypes.INTEGER) ); + registerFunction("bin", new StandardSQLFunction("bin", StandardBasicTypes.STRING) ); + registerFunction("char_length", new StandardSQLFunction("char_length", StandardBasicTypes.LONG) ); + registerFunction("character_length", new StandardSQLFunction("character_length", StandardBasicTypes.LONG) ); + registerFunction("lengthb", new StandardSQLFunction("lengthb", StandardBasicTypes.LONG) ); + registerFunction("lengthh", new StandardSQLFunction("lengthh", StandardBasicTypes.LONG) ); + registerFunction("lcase", new StandardSQLFunction("lcase") ); + registerFunction("lower", new StandardSQLFunction("lower") ); + registerFunction("ltrim", new StandardSQLFunction("ltrim") ); + registerFunction("reverse", new StandardSQLFunction("reverse") ); + registerFunction("rtrim", new StandardSQLFunction("rtrim") ); + registerFunction("trim", new StandardSQLFunction("trim") ); + registerFunction("space", new StandardSQLFunction("space", StandardBasicTypes.STRING) ); + registerFunction("ucase", new StandardSQLFunction("ucase") ); + registerFunction("upper", new StandardSQLFunction("upper") ); - registerFunction( "power", new StandardSQLFunction( "power" ) ); - registerFunction( "stddev", new StandardSQLFunction( "stddev" ) ); - registerFunction( "variance", new StandardSQLFunction( "variance" ) ); - registerFunction( "round", new StandardSQLFunction( "round" ) ); - registerFunction( "trunc", new StandardSQLFunction( "trunc" ) ); - registerFunction( "ceil", new StandardSQLFunction( "ceil" ) ); - registerFunction( "floor", new StandardSQLFunction( "floor" ) ); - registerFunction( "ltrim", new StandardSQLFunction( "ltrim" ) ); - registerFunction( "rtrim", new StandardSQLFunction( "rtrim" ) ); - registerFunction( "nvl", new StandardSQLFunction( "nvl" ) ); - registerFunction( "nvl2", new StandardSQLFunction( "nvl2" ) ); - registerFunction( "sign", new StandardSQLFunction( "sign", StandardBasicTypes.INTEGER ) ); - registerFunction( "chr", new StandardSQLFunction( "chr", StandardBasicTypes.CHARACTER ) ); - registerFunction( "to_char", new StandardSQLFunction( "to_char", StandardBasicTypes.STRING ) ); - registerFunction( "to_date", new StandardSQLFunction( "to_date", StandardBasicTypes.TIMESTAMP ) ); - registerFunction( "last_day", new StandardSQLFunction( "last_day", StandardBasicTypes.DATE ) ); - registerFunction( "instr", new StandardSQLFunction( "instr", StandardBasicTypes.INTEGER ) ); - registerFunction( "instrb", new StandardSQLFunction( "instrb", StandardBasicTypes.INTEGER ) ); - registerFunction( "lpad", new StandardSQLFunction( "lpad", StandardBasicTypes.STRING ) ); - registerFunction( "replace", new StandardSQLFunction( "replace", StandardBasicTypes.STRING ) ); - registerFunction( "rpad", new StandardSQLFunction( "rpad", StandardBasicTypes.STRING ) ); - registerFunction( "substr", new StandardSQLFunction( "substr", StandardBasicTypes.STRING ) ); - registerFunction( "substrb", new StandardSQLFunction( "substrb", StandardBasicTypes.STRING ) ); - 
registerFunction( "translate", new StandardSQLFunction( "translate", StandardBasicTypes.STRING ) ); - registerFunction( "add_months", new StandardSQLFunction( "add_months", StandardBasicTypes.DATE ) ); - registerFunction( "months_between", new StandardSQLFunction( "months_between", StandardBasicTypes.FLOAT ) ); + registerFunction("abs", new StandardSQLFunction("abs") ); + registerFunction("sign", new StandardSQLFunction("sign", StandardBasicTypes.INTEGER) ); - registerFunction( "current_date", new NoArgSQLFunction( "current_date", StandardBasicTypes.DATE, false ) ); - registerFunction( "current_time", new NoArgSQLFunction( "current_time", StandardBasicTypes.TIME, false ) ); - registerFunction( - "current_timestamp", - new NoArgSQLFunction( "current_timestamp", StandardBasicTypes.TIMESTAMP, false ) - ); - registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", StandardBasicTypes.DATE, false ) ); - registerFunction( "systime", new NoArgSQLFunction( "systime", StandardBasicTypes.TIME, false ) ); - registerFunction( "systimestamp", new NoArgSQLFunction( "systimestamp", StandardBasicTypes.TIMESTAMP, false ) ); - registerFunction( "user", new NoArgSQLFunction( "user", StandardBasicTypes.STRING, false ) ); - registerFunction( "rownum", new NoArgSQLFunction( "rownum", StandardBasicTypes.LONG, false ) ); - registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "", "||", "" ) ); + registerFunction("acos", new StandardSQLFunction("acos", StandardBasicTypes.DOUBLE) ); + registerFunction("asin", new StandardSQLFunction("asin", StandardBasicTypes.DOUBLE) ); + registerFunction("atan", new StandardSQLFunction("atan", StandardBasicTypes.DOUBLE) ); + registerFunction("cos", new StandardSQLFunction("cos", StandardBasicTypes.DOUBLE) ); + registerFunction("cot", new StandardSQLFunction("cot", StandardBasicTypes.DOUBLE) ); + registerFunction("exp", new StandardSQLFunction("exp", StandardBasicTypes.DOUBLE) ); + registerFunction("ln", new StandardSQLFunction("ln", StandardBasicTypes.DOUBLE) ); + registerFunction("log2", new StandardSQLFunction("log2", StandardBasicTypes.DOUBLE) ); + registerFunction("log10", new StandardSQLFunction("log10", StandardBasicTypes.DOUBLE) ); + registerFunction("pi", new NoArgSQLFunction("pi", StandardBasicTypes.DOUBLE) ); + registerFunction("rand", new NoArgSQLFunction("rand", StandardBasicTypes.DOUBLE) ); + registerFunction("random", new NoArgSQLFunction("random", StandardBasicTypes.DOUBLE) ); + registerFunction("sin", new StandardSQLFunction("sin", StandardBasicTypes.DOUBLE) ); + registerFunction("sqrt", new StandardSQLFunction("sqrt", StandardBasicTypes.DOUBLE) ); + registerFunction("tan", new StandardSQLFunction("tan", StandardBasicTypes.DOUBLE) ); + + registerFunction("radians", new StandardSQLFunction("radians", StandardBasicTypes.DOUBLE) ); + registerFunction("degrees", new StandardSQLFunction("degrees", StandardBasicTypes.DOUBLE) ); + + registerFunction("ceil", new StandardSQLFunction("ceil", StandardBasicTypes.INTEGER) ); + registerFunction("floor", new StandardSQLFunction("floor", StandardBasicTypes.INTEGER) ); + registerFunction("round", new StandardSQLFunction("round") ); + + registerFunction("datediff", new StandardSQLFunction("datediff", StandardBasicTypes.INTEGER) ); + registerFunction("timediff", new StandardSQLFunction("timediff", StandardBasicTypes.TIME) ); + + registerFunction("date", new StandardSQLFunction("date", StandardBasicTypes.DATE) ); + registerFunction("curdate", new NoArgSQLFunction("curdate", StandardBasicTypes.DATE) ); + 
registerFunction("current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false) ); + registerFunction("sys_date", new NoArgSQLFunction("sys_date", StandardBasicTypes.DATE, false) ); + registerFunction("sysdate", new NoArgSQLFunction("sysdate", StandardBasicTypes.DATE, false) ); + + registerFunction("time", new StandardSQLFunction("time", StandardBasicTypes.TIME) ); + registerFunction("curtime", new NoArgSQLFunction("curtime", StandardBasicTypes.TIME) ); + registerFunction("current_time", new NoArgSQLFunction("current_time", StandardBasicTypes.TIME, false) ); + registerFunction("sys_time", new NoArgSQLFunction("sys_time", StandardBasicTypes.TIME, false) ); + registerFunction("systime", new NoArgSQLFunction("systime", StandardBasicTypes.TIME, false) ); + + registerFunction("timestamp", new StandardSQLFunction("timestamp", StandardBasicTypes.TIMESTAMP) ); + registerFunction("current_timestamp", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIMESTAMP, false) ); + registerFunction("sys_timestamp", new NoArgSQLFunction("sys_timestamp", StandardBasicTypes.TIMESTAMP, false) ); + registerFunction("systimestamp", new NoArgSQLFunction("systimestamp", StandardBasicTypes.TIMESTAMP, false) ); + registerFunction("localtime", new NoArgSQLFunction("localtime", StandardBasicTypes.TIMESTAMP, false) ); + registerFunction("localtimestamp", new NoArgSQLFunction("localtimestamp", StandardBasicTypes.TIMESTAMP, false) ); + + registerFunction("day", new StandardSQLFunction("day", StandardBasicTypes.INTEGER) ); + registerFunction("dayofmonth", new StandardSQLFunction("dayofmonth", StandardBasicTypes.INTEGER) ); + registerFunction("dayofweek", new StandardSQLFunction("dayofweek", StandardBasicTypes.INTEGER) ); + registerFunction("dayofyear", new StandardSQLFunction("dayofyear", StandardBasicTypes.INTEGER) ); + registerFunction("from_days", new StandardSQLFunction("from_days", StandardBasicTypes.DATE) ); + registerFunction("from_unixtime", new StandardSQLFunction("from_unixtime", StandardBasicTypes.TIMESTAMP) ); + registerFunction("last_day", new StandardSQLFunction("last_day", StandardBasicTypes.DATE) ); + registerFunction("minute", new StandardSQLFunction("minute", StandardBasicTypes.INTEGER) ); + registerFunction("month", new StandardSQLFunction("month", StandardBasicTypes.INTEGER) ); + registerFunction("months_between", new StandardSQLFunction("months_between", StandardBasicTypes.DOUBLE) ); + registerFunction("now", new NoArgSQLFunction("now", StandardBasicTypes.TIMESTAMP) ); + registerFunction("quarter", new StandardSQLFunction("quarter", StandardBasicTypes.INTEGER) ); + registerFunction("second", new StandardSQLFunction("second", StandardBasicTypes.INTEGER) ); + registerFunction("sec_to_time", new StandardSQLFunction("sec_to_time", StandardBasicTypes.TIME) ); + registerFunction("time_to_sec", new StandardSQLFunction("time_to_sec", StandardBasicTypes.INTEGER) ); + registerFunction("to_days", new StandardSQLFunction("to_days", StandardBasicTypes.LONG) ); + registerFunction("unix_timestamp", new StandardSQLFunction("unix_timestamp", StandardBasicTypes.LONG) ); + registerFunction("utc_date", new NoArgSQLFunction("utc_date", StandardBasicTypes.STRING) ); + registerFunction("utc_time", new NoArgSQLFunction("utc_time", StandardBasicTypes.STRING) ); + registerFunction("week", new StandardSQLFunction("week", StandardBasicTypes.INTEGER) ); + registerFunction("weekday", new StandardSQLFunction("weekday", StandardBasicTypes.INTEGER) ); + registerFunction("year", new 
StandardSQLFunction("year", StandardBasicTypes.INTEGER) ); + + registerFunction("hex", new StandardSQLFunction("hex", StandardBasicTypes.STRING) ); + + registerFunction("octet_length", new StandardSQLFunction("octet_length", StandardBasicTypes.LONG) ); + registerFunction("bit_length", new StandardSQLFunction("bit_length", StandardBasicTypes.LONG) ); + + registerFunction("bit_count", new StandardSQLFunction("bit_count", StandardBasicTypes.LONG) ); + registerFunction("md5", new StandardSQLFunction("md5", StandardBasicTypes.STRING) ); + + registerFunction( "concat", new StandardSQLFunction( "concat", StandardBasicTypes.STRING ) ); + + registerFunction("substring", new StandardSQLFunction("substring", StandardBasicTypes.STRING) ); + registerFunction("substr", new StandardSQLFunction("substr", StandardBasicTypes.STRING) ); + + registerFunction("length", new StandardSQLFunction("length", StandardBasicTypes.INTEGER) ); + registerFunction("bit_length",new StandardSQLFunction("bit_length", StandardBasicTypes.INTEGER) ); + registerFunction("coalesce", new StandardSQLFunction("coalesce") ); + registerFunction("nullif", new StandardSQLFunction("nullif") ); + registerFunction("mod", new StandardSQLFunction("mod") ); + + registerFunction("power", new StandardSQLFunction("power") ); + registerFunction("stddev", new StandardSQLFunction("stddev") ); + registerFunction("variance", new StandardSQLFunction("variance") ); + registerFunction("trunc", new StandardSQLFunction("trunc") ); + registerFunction("nvl", new StandardSQLFunction("nvl") ); + registerFunction("nvl2", new StandardSQLFunction("nvl2") ); + registerFunction("chr", new StandardSQLFunction("chr", StandardBasicTypes.CHARACTER)); + registerFunction("to_char", new StandardSQLFunction("to_char", StandardBasicTypes.STRING) ); + registerFunction("to_date", new StandardSQLFunction("to_date", StandardBasicTypes.TIMESTAMP)); + registerFunction("instr", new StandardSQLFunction("instr", StandardBasicTypes.INTEGER) ); + registerFunction("instrb", new StandardSQLFunction("instrb", StandardBasicTypes.INTEGER) ); + registerFunction("lpad", new StandardSQLFunction("lpad", StandardBasicTypes.STRING) ); + registerFunction("replace", new StandardSQLFunction("replace", StandardBasicTypes.STRING) ); + registerFunction("rpad", new StandardSQLFunction("rpad", StandardBasicTypes.STRING) ); + registerFunction("translate", new StandardSQLFunction("translate", StandardBasicTypes.STRING) ); + + registerFunction("add_months", new StandardSQLFunction("add_months", StandardBasicTypes.DATE) ); + registerFunction("user", new NoArgSQLFunction("user", StandardBasicTypes.STRING, false) ); + registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.LONG, false) ); + registerFunction("concat", new VarArgsSQLFunction(StandardBasicTypes.STRING, "", "||", "")); + + registerKeyword( "TYPE" ); + registerKeyword( "YEAR" ); + registerKeyword( "MONTH" ); + registerKeyword( "ALIAS" ); + registerKeyword( "VALUE" ); + registerKeyword( "FIRST" ); + registerKeyword( "ROLE" ); + registerKeyword( "CLASS" ); + registerKeyword( "BIT" ); + registerKeyword( "TIME" ); + registerKeyword( "QUERY" ); + registerKeyword( "DATE" ); + registerKeyword( "USER" ); + registerKeyword( "ACTION" ); + registerKeyword( "SYS_USER" ); + registerKeyword( "ZONE" ); + registerKeyword( "LANGUAGE" ); + registerKeyword( "DICTIONARY" ); + registerKeyword( "DATA" ); + registerKeyword( "TEST" ); + registerKeyword( "SUPERCLASS" ); + registerKeyword( "SECTION" ); + registerKeyword( "LOWER" ); + registerKeyword( 
"LIST" ); + registerKeyword( "OID" ); + registerKeyword( "DAY" ); + registerKeyword( "IF" ); + registerKeyword( "ATTRIBUTE" ); + registerKeyword( "STRING" ); + registerKeyword( "SEARCH" ); + } + + public boolean supportsIdentityColumns() { + return true; + } + + public String getIdentityInsertString() { + return "NULL"; + } + + public boolean supportsColumnCheck() { + return false; + } + + public boolean supportsPooledSequences() { + return true; + } + + public String getIdentitySelectString() { + return "select last_insert_id()"; } + protected String getIdentityColumnString() { + return "not null auto_increment"; //starts with 1, implicitly + } + + /* + * CUBRID supports "ADD [COLUMN | ATTRIBUTE]" + */ public String getAddColumnString() { return "add"; } @@ -143,50 +272,39 @@ public class CUBRIDDialect extends Dialect { return "drop serial " + sequenceName; } + public String getDropForeignKeyString() { + return " drop foreign key "; + } + + public boolean qualifyIndexName() { + return false; + } + public boolean supportsSequences() { return true; } + public boolean supportsExistsInSelect() { + return false; + } + public String getQuerySequencesString() { return "select name from db_serial"; } - public boolean dropConstraints() { - return false; - } - - public boolean supportsLimit() { - return true; - } - - public String getLimitString(String sql, boolean hasOffset) { - // CUBRID 8.3.0 support limit - return new StringBuilder( sql.length() + 20 ).append( sql ) - .append( hasOffset ? " limit ?, ?" : " limit ?" ).toString(); - } - - public boolean bindLimitParametersInReverseOrder() { - return true; - } - - public boolean useMaxForLimit() { - return true; - } - - public boolean forUpdateOfColumns() { - return true; - } - - public char closeQuote() { - return ']'; - } - + /** + * The character specific to this dialect used to close a quoted identifier. + * CUBRID supports square brackets (MSSQL style), backticks (MySQL style), + * as well as double quotes (Oracle style). + * + * @return The dialect's specific open quote character. 
+ */ public char openQuote() { return '['; } - public boolean hasAlterTable() { - return false; + public char closeQuote() { + return ']'; } public String getForUpdateString() { @@ -197,23 +315,31 @@ public class CUBRIDDialect extends Dialect { return true; } - public boolean supportsCommentOn() { - return false; - } - - public boolean supportsTemporaryTables() { - return false; - } - public boolean supportsCurrentTimestampSelection() { return true; } public String getCurrentTimestampSelectString() { - return "select systimestamp from table({1}) as T(X)"; + return "select now()"; } public boolean isCurrentTimestampSelectStringCallable() { return false; } + + public boolean supportsEmptyInList() { + return false; + } + + public boolean supportsIfExistsBeforeTableName() { + return true; + } + + public boolean supportsTupleDistinctCounts() { + return false; + } + + public LimitHandler buildLimitHandler(String sql, RowSelection selection) { + return new CUBRIDLimitHandler( this, sql, selection ); + } } diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java index 04da5a447c..3bfa7a7695 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/DB2Dialect.java @@ -159,7 +159,7 @@ public class DB2Dialect extends Dialect { registerFunction( "substring", new StandardSQLFunction( "substr", StandardBasicTypes.STRING ) ); registerFunction( "bit_length", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "length(?1)*8" ) ); - registerFunction( "trim", new AnsiTrimEmulationFunction() ); + registerFunction( "trim", new SQLFunctionTemplate( StandardBasicTypes.STRING, "trim(?1 ?2 ?3 ?4)" ) ); registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "", "||", "" ) ); diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java index 288fabf6e5..d2374221f7 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java @@ -39,8 +39,6 @@ import java.util.Map; import java.util.Properties; import java.util.Set; -import org.jboss.logging.Logger; - import org.hibernate.HibernateException; import org.hibernate.LockMode; import org.hibernate.LockOptions; @@ -83,6 +81,11 @@ import org.hibernate.metamodel.spi.relational.Sequence; import org.hibernate.metamodel.spi.relational.Table; import org.hibernate.metamodel.spi.relational.UniqueKey; import org.hibernate.persister.entity.Lockable; +import org.hibernate.sql.ANSICaseFragment; +import org.hibernate.sql.ANSIJoinFragment; +import org.hibernate.sql.CaseFragment; +import org.hibernate.sql.ForUpdateFragment; +import org.hibernate.sql.JoinFragment; import org.hibernate.tool.schema.internal.StandardAuxiliaryDatabaseObjectExporter; import org.hibernate.tool.schema.internal.StandardForeignKeyExporter; import org.hibernate.tool.schema.internal.StandardIndexExporter; @@ -90,15 +93,10 @@ import org.hibernate.tool.schema.internal.StandardSequenceExporter; import org.hibernate.tool.schema.internal.StandardTableExporter; import org.hibernate.tool.schema.internal.StandardUniqueKeyExporter; import org.hibernate.tool.schema.spi.Exporter; -import org.hibernate.sql.ANSICaseFragment; -import org.hibernate.sql.ANSIJoinFragment; -import org.hibernate.sql.CaseFragment; -import org.hibernate.sql.ForUpdateFragment; -import 
org.hibernate.sql.JoinFragment; import org.hibernate.type.StandardBasicTypes; -import org.hibernate.type.descriptor.sql.BlobTypeDescriptor; import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; +import org.jboss.logging.Logger; /** * Represents a dialect of SQL implemented by a particular RDBMS. @@ -327,6 +325,23 @@ public abstract class Dialect implements ConversionContext { return getTypeName( code, Column.DEFAULT_LENGTH, Column.DEFAULT_PRECISION, Column.DEFAULT_SCALE ); } + public String cast(String value, int jdbcTypeCode, int length, int precision, int scale) { + if ( jdbcTypeCode == Types.CHAR ) { + return "cast(" + value + " as char(" + length + "))"; + } + else { + return "cast(" + value + " as " + getTypeName( jdbcTypeCode, length, precision, scale ) + ")"; + } + } + + public String cast(String value, int jdbcTypeCode, int length) { + return cast( value, jdbcTypeCode, length, Column.DEFAULT_PRECISION, Column.DEFAULT_SCALE ); + } + + public String cast(String value, int jdbcTypeCode, int precision, int scale) { + return cast( value, jdbcTypeCode, Column.DEFAULT_LENGTH, precision, scale ); + } + /** * Subclasses register a type name for the given type code and maximum * column length. $l in the type name will be replaced by the @@ -391,10 +406,6 @@ public abstract class Dialect implements ConversionContext { protected SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) { SqlTypeDescriptor descriptor; switch ( sqlCode ) { - case Types.BLOB: { - descriptor = useInputStreamToInsertBlob() ? BlobTypeDescriptor.STREAM_BINDING : null; - break; - } case Types.CLOB: { descriptor = useInputStreamToInsertBlob() ? ClobTypeDescriptor.STREAM_BINDING : null; break; @@ -617,7 +628,9 @@ public abstract class Dialect implements ConversionContext { // function support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ protected void registerFunction(String name, SQLFunction function) { - sqlFunctions.put( name, function ); + // HHH-7721: SQLFunctionRegistry expects all lowercase. Enforce, + // just in case a user's custom dialect uses mixed cases. + sqlFunctions.put( name.toLowerCase(), function ); } /** @@ -2419,4 +2432,15 @@ public abstract class Dialect implements ConversionContext { public int getInExpressionCountLimit() { return 0; } + + /** + * HHH-4635 + * Oracle expects all Lob values to be last in inserts and updates. + * + * @return boolean True if Lob values should be last, false if it + * does not matter. + */ + public boolean forceLobAsLastValue() { + return false; + } } diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java index 42338ef7a1..ca64af3dcd 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java @@ -27,8 +27,6 @@ import java.io.Serializable; import java.sql.SQLException; import java.sql.Types; -import org.jboss.logging.Logger; - import org.hibernate.JDBCException; import org.hibernate.LockMode; import org.hibernate.StaleObjectStateException; @@ -53,6 +51,7 @@ import org.hibernate.internal.util.JdbcExceptionHelper; import org.hibernate.internal.util.ReflectHelper; import org.hibernate.persister.entity.Lockable; import org.hibernate.type.StandardBasicTypes; +import org.jboss.logging.Logger; /** * An SQL dialect compatible with HSQLDB (HyperSQL).
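For illustration of the HHH-7721 change above (Configuration.addSqlFunction() and Dialect.registerFunction() now lower-case the registration key), a minimal sketch of registering a custom SQL function under a mixed-case name; the function name "dayName" and its StandardSQLFunction mapping are illustrative only, not part of this patch:

    import org.hibernate.cfg.Configuration;
    import org.hibernate.dialect.function.StandardSQLFunction;
    import org.hibernate.type.StandardBasicTypes;

    public class CustomFunctionExample {
        public static void main(String[] args) {
            Configuration cfg = new Configuration();
            // Registered under a mixed-case name; stored as "dayname" after this change,
            // so HQL such as "select dayName(e.startDate) from Event e" still resolves it,
            // because SQLFunctionRegistry also lower-cases its lookups.
            cfg.addSqlFunction( "dayName", new StandardSQLFunction( "dayname", StandardBasicTypes.STRING ) );
        }
    }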
@@ -123,8 +122,8 @@ public class HSQLDialect extends Dialect { registerColumnType( Types.CLOB, "longvarchar" ); } else { - registerColumnType( Types.BLOB, "blob" ); - registerColumnType( Types.CLOB, "clob" ); + registerColumnType( Types.BLOB, "blob($l)" ); + registerColumnType( Types.CLOB, "clob($l)" ); } // aggregate functions @@ -244,7 +243,12 @@ public class HSQLDialect extends Dialect { } public String getForUpdateString() { - return ""; + if ( hsqldbVersion >= 20 ) { + return " for update"; + } + else { + return ""; + } } public boolean supportsUnique() { diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java index 40a5891f43..5dcbc459dc 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/Oracle8iDialect.java @@ -123,6 +123,7 @@ public class Oracle8iDialect extends Dialect { registerFunction( "acos", new StandardSQLFunction("acos", StandardBasicTypes.DOUBLE) ); registerFunction( "asin", new StandardSQLFunction("asin", StandardBasicTypes.DOUBLE) ); registerFunction( "atan", new StandardSQLFunction("atan", StandardBasicTypes.DOUBLE) ); + registerFunction( "bitand", new StandardSQLFunction("bitand") ); registerFunction( "cos", new StandardSQLFunction("cos", StandardBasicTypes.DOUBLE) ); registerFunction( "cosh", new StandardSQLFunction("cosh", StandardBasicTypes.DOUBLE) ); registerFunction( "exp", new StandardSQLFunction("exp", StandardBasicTypes.DOUBLE) ); @@ -570,5 +571,10 @@ public class Oracle8iDialect extends Dialect { public int getInExpressionCountLimit() { return PARAM_LIST_SIZE_LIMIT; } + + @Override + public boolean forceLobAsLastValue() { + return true; + } } diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java index a99661ee91..b3d36326d3 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/PostgreSQL81Dialect.java @@ -163,6 +163,10 @@ public class PostgreSQL81Dialect extends Dialect { SqlTypeDescriptor descriptor; switch ( sqlCode ) { case Types.BLOB: { + // Force BLOB binding. Otherwise, byte[] fields annotated + // with @Lob will attempt to use + // BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING. Since the + // dialect uses oid for Blobs, byte arrays cannot be used. descriptor = BlobTypeDescriptor.BLOB_BINDING; break; } @@ -462,4 +466,14 @@ public class PostgreSQL81Dialect extends Dialect { public boolean supportsRowValueConstructorSyntax() { return true; } + + @Override + public String getForUpdateNowaitString() { + return getForUpdateString() + " nowait "; + } + + @Override + public String getForUpdateNowaitString(String aliases) { + return getForUpdateString(aliases) + " nowait "; + } } diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java index 13f333933c..4064b2347a 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/SybaseDialect.java @@ -23,6 +23,11 @@ */ package org.hibernate.dialect; +import java.sql.Types; + +import org.hibernate.type.descriptor.sql.BlobTypeDescriptor; +import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; + /** * All Sybase dialects share an IN list size limit. 
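The BLOB descriptor changes in this area (PostgreSQL81Dialect forcing BlobTypeDescriptor.BLOB_BINDING above, SybaseDialect switching to BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING below) both target mappings of the following shape; the Document entity is a hypothetical example, not part of this patch:

    import javax.persistence.Entity;
    import javax.persistence.Id;
    import javax.persistence.Lob;

    @Entity
    public class Document {
        @Id
        private Long id;

        // The dialect's BLOB SqlTypeDescriptor decides how this field is bound:
        // PostgreSQL81Dialect uses oid-backed Blobs (BLOB_BINDING), so byte[] binding
        // cannot be used there, while SybaseDialect binds the byte[] directly
        // (PRIMITIVE_ARRAY_BINDING).
        @Lob
        private byte[] content;

        public Long getId() { return id; }
        public byte[] getContent() { return content; }
        public void setContent(byte[] content) { this.content = content; }
    }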
@@ -40,4 +45,9 @@ public class SybaseDialect extends AbstractTransactSQLDialect { public int getInExpressionCountLimit() { return PARAM_LIST_SIZE_LIMIT; } + + @Override + protected SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) { + return sqlCode == Types.BLOB ? BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING : super.getSqlTypeDescriptorOverride( sqlCode ); + } } diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java b/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java index e013421538..9eec303f82 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/function/SQLFunctionRegistry.java @@ -38,7 +38,6 @@ public class SQLFunctionRegistry { } public SQLFunction findSQLFunction(String functionName) { - // TODO: lower casing done here. Was done "at random" before; maybe not needed at all ? String name = functionName.toLowerCase(); SQLFunction userFunction = userFunctions.get( name ); return userFunction != null @@ -47,7 +46,6 @@ public class SQLFunctionRegistry { } public boolean hasFunction(String functionName) { - // TODO: toLowerCase was not done before. Only used in Template. String name = functionName.toLowerCase(); return userFunctions.containsKey( name ) || dialect.getFunctions().containsKey( name ); } diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/pagination/CUBRIDLimitHandler.java b/hibernate-core/src/main/java/org/hibernate/dialect/pagination/CUBRIDLimitHandler.java new file mode 100644 index 0000000000..4ee34f42ae --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/dialect/pagination/CUBRIDLimitHandler.java @@ -0,0 +1,37 @@ +package org.hibernate.dialect.pagination; + +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.spi.RowSelection; + +/** + * Limit handler that delegates all operations to the underlying dialect. + * + * @author Esen Sagynov (kadishmal at gmail dot com) + */ +public class CUBRIDLimitHandler extends AbstractLimitHandler { + private final Dialect dialect; + + public CUBRIDLimitHandler(Dialect dialect, String sql, RowSelection selection) { + super( sql, selection ); + this.dialect = dialect; + } + + public boolean supportsLimit() { + return true; + } + + public String getProcessedSql() { + if (LimitHelper.useLimit(this, selection)) { + // useLimitOffset: whether "offset" is set or not; + // if set, use "LIMIT offset, row_count" syntax; + // if not, use "LIMIT row_count" + boolean useLimitOffset = LimitHelper.hasFirstRow(selection); + + return new StringBuilder(sql.length() + 20).append(sql) + .append(useLimitOffset ? " limit ?, ?" 
: " limit ?").toString(); + } + else { + return sql; // or return unaltered SQL + } + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java index ba4bb21c66..c9463b5659 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/Cascade.java @@ -252,9 +252,12 @@ public final class Cascade { loadedValue = null; } if ( loadedValue != null ) { - final String entityName = entry.getPersister().getEntityName(); + final EntityEntry valueEntry = eventSource + .getPersistenceContext().getEntry( + loadedValue ); + final String entityName = valueEntry.getPersister().getEntityName(); if ( LOG.isTraceEnabled() ) { - final Serializable id = entry.getPersister().getIdentifier( loadedValue, eventSource ); + final Serializable id = valueEntry.getPersister().getIdentifier( loadedValue, eventSource ); final String description = MessageHelper.infoString( entityName, id ); LOG.tracev( "Deleting orphaned entity instance: {0}", description ); } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java index 16311723da..2c6a840ba3 100755 --- a/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java @@ -25,8 +25,6 @@ package org.hibernate.engine.internal; import java.io.Serializable; -import org.jboss.logging.Logger; - import org.hibernate.AssertionFailure; import org.hibernate.HibernateException; import org.hibernate.collection.spi.PersistentCollection; @@ -41,6 +39,7 @@ import org.hibernate.internal.CoreMessageLogger; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.pretty.MessageHelper; import org.hibernate.type.CollectionType; +import org.jboss.logging.Logger; /** * Implements book-keeping for the collection persistence by reachability algorithm @@ -76,10 +75,8 @@ public final class Collections { if ( LOG.isDebugEnabled() && loadedPersister != null ) { LOG.debugf( "Collection dereferenced: %s", - MessageHelper.collectionInfoString( - loadedPersister, - entry.getLoadedKey(), - session.getFactory() + MessageHelper.collectionInfoString( loadedPersister, + coll, entry.getLoadedKey(), session ) ); } @@ -135,7 +132,9 @@ public final class Collections { if ( LOG.isDebugEnabled() ) { LOG.debugf( "Found collection with unloaded owner: %s", - MessageHelper.collectionInfoString( entry.getLoadedPersister(), entry.getLoadedKey(), session.getFactory() ) ); + MessageHelper.collectionInfoString( + entry.getLoadedPersister(), coll, + entry.getLoadedKey(), session ) ); } entry.setCurrentPersister( entry.getLoadedPersister() ); @@ -189,13 +188,13 @@ public final class Collections { if (LOG.isDebugEnabled()) { if (collection.wasInitialized()) LOG.debugf("Collection found: %s, was: %s (initialized)", - MessageHelper.collectionInfoString(persister, ce.getCurrentKey(), factory), - MessageHelper.collectionInfoString(ce.getLoadedPersister(), + MessageHelper.collectionInfoString(persister, collection, ce.getCurrentKey(), session), + MessageHelper.collectionInfoString(ce.getLoadedPersister(), collection, ce.getLoadedKey(), - factory)); + session)); else LOG.debugf("Collection found: %s, was: %s (uninitialized)", - MessageHelper.collectionInfoString(persister, ce.getCurrentKey(), factory), - 
MessageHelper.collectionInfoString(ce.getLoadedPersister(), ce.getLoadedKey(), factory)); + MessageHelper.collectionInfoString(persister, collection, ce.getCurrentKey(), session), + MessageHelper.collectionInfoString(ce.getLoadedPersister(), collection, ce.getLoadedKey(), session)); } prepareCollectionForUpdate( collection, ce, factory ); diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java index 55df82ce3b..d195654a15 100755 --- a/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/JoinHelper.java @@ -71,31 +71,46 @@ public final class JoinHelper { * be used in the join */ public static String[] getAliasedLHSColumnNames( - AssociationType type, - String alias, - int property, + AssociationType associationType, + String columnQualifier, + int propertyIndex, int begin, OuterJoinLoadable lhsPersister, - Mapping mapping - ) { - if ( type.useLHSPrimaryKey() ) { - return StringHelper.qualify( alias, lhsPersister.getIdentifierColumnNames() ); + Mapping mapping) { + if ( associationType.useLHSPrimaryKey() ) { + return StringHelper.qualify( columnQualifier, lhsPersister.getIdentifierColumnNames() ); } else { - String propertyName = type.getLHSPropertyName(); - if (propertyName==null) { - return ArrayHelper.slice( - lhsPersister.toColumns(alias, property), - begin, - type.getColumnSpan(mapping) - ); + String propertyName = associationType.getLHSPropertyName(); + if ( propertyName == null ) { + return ArrayHelper.slice( + toColumns( lhsPersister, columnQualifier, propertyIndex ), + begin, + associationType.getColumnSpan( mapping ) + ); } else { - return ( (PropertyMapping) lhsPersister ).toColumns(alias, propertyName); //bad cast + return ( (PropertyMapping) lhsPersister ).toColumns(columnQualifier, propertyName); //bad cast } } } - + + private static String[] toColumns(OuterJoinLoadable persister, String columnQualifier, int propertyIndex) { + if ( propertyIndex >= 0 ) { + return persister.toColumns( columnQualifier, propertyIndex ); + } + else { + final String[] cols = persister.getIdentifierColumnNames(); + final String[] result = new String[cols.length]; + + for ( int j = 0; j < cols.length; j++ ) { + result[j] = StringHelper.qualify( columnQualifier, cols[j] ); + } + + return result; + } + } + /** * Get the columns of the owning entity which are to * be used in the join @@ -116,8 +131,10 @@ public final class JoinHelper { if (propertyName==null) { //slice, to get the columns for this component //property - return ArrayHelper.slice( - lhsPersister.getSubclassPropertyColumnNames(property), + return ArrayHelper.slice( + property < 0 + ? lhsPersister.getIdentifierColumnNames() + : lhsPersister.getSubclassPropertyColumnNames(property), begin, type.getColumnSpan(mapping) ); @@ -131,11 +148,10 @@ public final class JoinHelper { } public static String getLHSTableName( - AssociationType type, - int property, - OuterJoinLoadable lhsPersister - ) { - if ( type.useLHSPrimaryKey() ) { + AssociationType type, + int propertyIndex, + OuterJoinLoadable lhsPersister) { + if ( type.useLHSPrimaryKey() || propertyIndex < 0 ) { return lhsPersister.getTableName(); } else { @@ -144,7 +160,7 @@ public final class JoinHelper { //if there is no property-ref, assume the join //is to the subclass table (ie. 
the table of the //subclass that the association belongs to) - return lhsPersister.getSubclassPropertyTableName(property); + return lhsPersister.getSubclassPropertyTableName(propertyIndex); } else { //handle a property-ref @@ -157,7 +173,7 @@ public final class JoinHelper { //assumes that the property-ref refers to a property of the subclass //table that the association belongs to (a reasonable guess) //TODO: fix this, add: OuterJoinLoadable.getSubclassPropertyTableName(String propertyName) - propertyRefTable = lhsPersister.getSubclassPropertyTableName(property); + propertyRefTable = lhsPersister.getSubclassPropertyTableName(propertyIndex); } return propertyRefTable; } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java index 6b7185df56..0af6499f14 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/StatefulPersistenceContext.java @@ -93,6 +93,8 @@ public class StatefulPersistenceContext implements PersistenceContext { private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, StatefulPersistenceContext.class.getName() ); + private static final boolean tracing = LOG.isTraceEnabled(); + public static final Object NO_ROW = new MarkerObject( "NO_ROW" ); private static final int INIT_COLL_SIZE = 8; @@ -893,6 +895,9 @@ public class StatefulPersistenceContext implements PersistenceContext { public void addUninitializedCollection(CollectionPersister persister, PersistentCollection collection, Serializable id) { CollectionEntry ce = new CollectionEntry(collection, persister, id, flushing); addCollection(collection, ce, id); + if ( persister.getBatchSize() > 1 ) { + getBatchFetchQueue().addBatchLoadableCollection( collection, ce ); + } } /** @@ -902,6 +907,9 @@ public class StatefulPersistenceContext implements PersistenceContext { public void addUninitializedDetachedCollection(CollectionPersister persister, PersistentCollection collection) { CollectionEntry ce = new CollectionEntry( persister, collection.getKey() ); addCollection( collection, ce, collection.getKey() ); + if ( persister.getBatchSize() > 1 ) { + getBatchFetchQueue().addBatchLoadableCollection( collection, ce ); + } } /** @@ -1003,7 +1011,9 @@ public class StatefulPersistenceContext implements PersistenceContext { @Override public void initializeNonLazyCollections() throws HibernateException { if ( loadCounter == 0 ) { - LOG.debug( "Initializing non-lazy collections" ); + if (tracing) + LOG.trace( "Initializing non-lazy collections" ); + //do this work only at the very highest level of the load loadCounter++; //don't let this method be called recursively try { @@ -1861,14 +1871,14 @@ public class StatefulPersistenceContext implements PersistenceContext { CachedNaturalIdValueSource source) { final NaturalIdRegionAccessStrategy naturalIdCacheAccessStrategy = persister.getNaturalIdCacheAccessStrategy(); final NaturalIdCacheKey naturalIdCacheKey = new NaturalIdCacheKey( naturalIdValues, persister, session ); - if (naturalIdCacheAccessStrategy.get(naturalIdCacheKey, session.getTimestamp()) != null) { - return; // prevent identical re-cachings - } final SessionFactoryImplementor factory = session.getFactory(); switch ( source ) { case LOAD: { + if (naturalIdCacheAccessStrategy.get(naturalIdCacheKey, session.getTimestamp()) != null) { + return; // 
prevent identical re-cachings + } final boolean put = naturalIdCacheAccessStrategy.putFromLoad( naturalIdCacheKey, id, @@ -1915,6 +1925,9 @@ public class StatefulPersistenceContext implements PersistenceContext { } case UPDATE: { final NaturalIdCacheKey previousCacheKey = new NaturalIdCacheKey( previousNaturalIdValues, persister, session ); + if (naturalIdCacheKey.equals(previousCacheKey)) { + return; // prevent identical re-caching, solves HHH-7309 + } final SoftLock removalLock = naturalIdCacheAccessStrategy.lockItem( previousCacheKey, null ); naturalIdCacheAccessStrategy.remove( previousCacheKey ); @@ -2078,6 +2091,15 @@ public class StatefulPersistenceContext implements PersistenceContext { public void cleanupFromSynchronizations() { naturalIdXrefDelegate.unStashInvalidNaturalIdReferences(); } + + @Override + public void handleEviction(Object object, EntityPersister persister, Serializable identifier) { + naturalIdXrefDelegate.removeNaturalIdCrossReference( + persister, + identifier, + findCachedNaturalId( persister, identifier ) + ); + } }; @Override diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java index a434f584ff..abfe8851ce 100755 --- a/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/TwoPhaseLoad.java @@ -281,19 +281,6 @@ public final class TwoPhaseLoad { session ); - if ( session.isEventSource() ) { - postLoadEvent.setEntity( entity ).setId( id ).setPersister( persister ); - - final EventListenerGroup listenerGroup = session - .getFactory() - .getServiceRegistry() - .getService( EventListenerRegistry.class ) - .getEventListenerGroup( EventType.POST_LOAD ); - for ( PostLoadEventListener listener : listenerGroup.listeners() ) { - listener.onPostLoad( postLoadEvent ); - } - } - if ( LOG.isDebugEnabled() ) { LOG.debugf( "Done materializing entity %s", @@ -305,6 +292,45 @@ public final class TwoPhaseLoad { factory.getStatisticsImplementor().loadEntity( persister.getEntityName() ); } } + + /** + * PostLoad cannot occur during initializeEntity, as that call occurs *before* + * the Set collections are added to the persistence context by Loader. + * Without the split, LazyInitializationExceptions can occur in the Entity's + * postLoad if it acts upon the collection. 
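A mapping of the kind this protects against looks roughly like the following (a hypothetical entity, shown only to illustrate a @PostLoad callback that touches a lazy collection):

    import java.util.HashSet;
    import java.util.Set;

    import javax.persistence.ElementCollection;
    import javax.persistence.Entity;
    import javax.persistence.Id;
    import javax.persistence.PostLoad;

    @Entity
    public class Customer {
        @Id
        private Long id;

        @ElementCollection
        private Set<String> tags = new HashSet<String>();

        @PostLoad
        private void afterLoad() {
            // Acts on the collection during post-load. This is only safe once the
            // collection has been registered with the persistence context, which is
            // why POST_LOAD is now fired from the separate postLoad() phase.
            int tagCount = tags.size();
        }
    }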
+ * + * + * HHH-6043 + * + * @param entity + * @param session + * @param postLoadEvent + */ + public static void postLoad( + final Object entity, + final SessionImplementor session, + final PostLoadEvent postLoadEvent) { + + if ( session.isEventSource() ) { + final PersistenceContext persistenceContext + = session.getPersistenceContext(); + final EntityEntry entityEntry = persistenceContext.getEntry(entity); + final Serializable id = entityEntry.getId(); + + postLoadEvent.setEntity( entity ).setId( entityEntry.getId() ) + .setPersister( entityEntry.getPersister() ); + + final EventListenerGroup listenerGroup + = session + .getFactory() + .getServiceRegistry() + .getService( EventListenerRegistry.class ) + .getEventListenerGroup( EventType.POST_LOAD ); + for ( PostLoadEventListener listener : listenerGroup.listeners() ) { + listener.onPostLoad( postLoadEvent ); + } + } + } private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) { return ( session.getFactory().getServiceRegistry().getService( RegionFactory.class ).isMinimalPutsEnabled() && diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java index d4e463ffa7..2150d73c62 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/AbstractLobCreator.java @@ -31,16 +31,12 @@ import java.sql.Clob; * @author Steve Ebersole */ public abstract class AbstractLobCreator implements LobCreator { - /** - * {@inheritDoc} - */ + @Override public Blob wrap(Blob blob) { return SerializableBlobProxy.generateProxy( blob ); } - /** - * {@inheritDoc} - */ + @Override public Clob wrap(Clob clob) { if ( SerializableNClobProxy.isNClob( clob ) ) { return SerializableNClobProxy.generateProxy( clob ); diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/BinaryStream.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BinaryStream.java similarity index 91% rename from hibernate-core/src/main/java/org/hibernate/type/descriptor/BinaryStream.java rename to hibernate-core/src/main/java/org/hibernate/engine/jdbc/BinaryStream.java index 24ef16ce94..1e15d37fe3 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/BinaryStream.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BinaryStream.java @@ -21,7 +21,8 @@ * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ -package org.hibernate.type.descriptor; +package org.hibernate.engine.jdbc; + import java.io.InputStream; /** @@ -49,5 +50,10 @@ public interface BinaryStream { * * @return The input stream length */ - public int getLength(); + public long getLength(); + + /** + * Release any underlying resources. + */ + public void release(); } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java index 2f10a3f8aa..e5a7983085 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobImplementer.java @@ -23,11 +23,16 @@ */ package org.hibernate.engine.jdbc; - /** * Marker interface for non-contextually created {@link java.sql.Blob} instances.. * * @author Steve Ebersole */ public interface BlobImplementer { + /** + * Gets access to the data underlying this BLOB. + * + * @return Access to the underlying data. 
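With getUnderlyingStream() available on the marker interface, code holding a Hibernate-generated Blob proxy can reach the raw data without any JDBC round trip; a sketch (assumes the patched org.hibernate.engine.jdbc classes above):

    import java.sql.Blob;

    import org.hibernate.engine.jdbc.BinaryStream;
    import org.hibernate.engine.jdbc.BlobImplementer;
    import org.hibernate.engine.jdbc.NonContextualLobCreator;

    public class BlobDataSketch {
        public static void main(String[] args) {
            Blob blob = NonContextualLobCreator.INSTANCE.createBlob( new byte[] { 1, 2, 3 } );
            // proxies generated by BlobProxy also implement BlobImplementer
            BinaryStream data = ( (BlobImplementer) blob ).getUnderlyingStream();
            System.out.println( data.getLength() );        // 3
            System.out.println( data.getBytes().length );  // 3
            data.release();                                // frees the backing stream
        }
    }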
+ */ + public BinaryStream getUnderlyingStream(); } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java index 4be9234a83..0bc01a7eee 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/BlobProxy.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationHandler; @@ -30,12 +31,12 @@ import java.lang.reflect.Proxy; import java.sql.Blob; import java.sql.SQLException; -import org.hibernate.type.descriptor.java.BinaryStreamImpl; +import org.hibernate.engine.jdbc.internal.BinaryStreamImpl; import org.hibernate.type.descriptor.java.DataHelper; /** - * Manages aspects of proxying {@link Blob Blobs} for non-contextual creation, including proxy creation and - * handling proxy invocations. + * Manages aspects of proxying {@link Blob} references for non-contextual creation, including proxy creation and + * handling proxy invocations. We use proxies here solely to avoid JDBC version incompatibilities. * * @author Gavin King * @author Steve Ebersole @@ -44,8 +45,7 @@ import org.hibernate.type.descriptor.java.DataHelper; public class BlobProxy implements InvocationHandler { private static final Class[] PROXY_INTERFACES = new Class[] { Blob.class, BlobImplementer.class }; - private InputStream stream; - private long length; + private BinaryStream binaryStream; private boolean needsReset = false; /** @@ -55,8 +55,7 @@ public class BlobProxy implements InvocationHandler { * @see #generateProxy(byte[]) */ private BlobProxy(byte[] bytes) { - this.stream = new BinaryStreamImpl( bytes ); - this.length = bytes.length; + binaryStream = new BinaryStreamImpl( bytes ); } /** @@ -67,17 +66,17 @@ public class BlobProxy implements InvocationHandler { * @see #generateProxy(java.io.InputStream, long) */ private BlobProxy(InputStream stream, long length) { - this.stream = stream; - this.length = length; + this.binaryStream = new StreamBackedBinaryStream( stream, length ); } private long getLength() { - return length; + return binaryStream.getLength(); } private InputStream getStream() throws SQLException { + InputStream stream = binaryStream.getInputStream(); try { - if (needsReset) { + if ( needsReset ) { stream.reset(); } } @@ -94,6 +93,7 @@ public class BlobProxy implements InvocationHandler { * @throws UnsupportedOperationException if any methods other than {@link Blob#length()} * or {@link Blob#getBinaryStream} are invoked. 
*/ + @Override @SuppressWarnings({ "UnnecessaryBoxing" }) public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { final String methodName = method.getName(); @@ -102,6 +102,9 @@ public class BlobProxy implements InvocationHandler { if ( "length".equals( methodName ) && argCount == 0 ) { return Long.valueOf( getLength() ); } + if ( "getUnderlyingStream".equals( methodName ) ) { + return binaryStream; + } if ( "getBinaryStream".equals( methodName ) ) { if ( argCount == 0 ) { return getStream(); @@ -137,7 +140,7 @@ public class BlobProxy implements InvocationHandler { } } if ( "free".equals( methodName ) && argCount == 0 ) { - stream.close(); + binaryStream.release(); return null; } if ( "toString".equals( methodName ) && argCount == 0 ) { @@ -197,4 +200,43 @@ public class BlobProxy implements InvocationHandler { } return cl; } + + private static class StreamBackedBinaryStream implements BinaryStream { + private final InputStream stream; + private final long length; + + private byte[] bytes; + + private StreamBackedBinaryStream(InputStream stream, long length) { + this.stream = stream; + this.length = length; + } + + @Override + public InputStream getInputStream() { + return stream; + } + + @Override + public byte[] getBytes() { + if ( bytes == null ) { + bytes = DataHelper.extractBytes( stream ); + } + return bytes; + } + + @Override + public long getLength() { + return (int) length; + } + + @Override + public void release() { + try { + stream.close(); + } + catch (IOException ignore) { + } + } + } } diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/CharacterStream.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/CharacterStream.java similarity index 76% rename from hibernate-core/src/main/java/org/hibernate/type/descriptor/CharacterStream.java rename to hibernate-core/src/main/java/org/hibernate/engine/jdbc/CharacterStream.java index 4165b069c4..f90d6c3356 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/CharacterStream.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/CharacterStream.java @@ -21,7 +21,9 @@ * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ -package org.hibernate.type.descriptor; +package org.hibernate.engine.jdbc; + +import java.io.InputStream; import java.io.Reader; /** @@ -32,17 +34,28 @@ import java.io.Reader; */ public interface CharacterStream { /** - * Retrieve the reader. + * Provides access to the underlying data as a Reader. * * @return The reader. */ - public Reader getReader(); + public Reader asReader(); /** - * Retrieve the number of characters. JDBC 3 and earlier defined the length in terms of int type rather than - * long type :( + * Provides access to the underlying data as a String. + * + * @return The underlying String data + */ + public String asString(); + + /** + * Retrieve the number of characters. * * @return The number of characters. */ - public int getLength(); + public long getLength(); + + /** + * Release any underlying resources. 
+ */ + public void release(); } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java index 4beed7a499..5fb4df2a7d 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobImplementer.java @@ -23,11 +23,16 @@ */ package org.hibernate.engine.jdbc; - /** * Marker interface for non-contextually created {@link java.sql.Clob} instances.. * * @author Steve Ebersole */ public interface ClobImplementer { + /** + * Gets access to the data underlying this CLOB. + * + * @return Access to the underlying data. + */ + public CharacterStream getUnderlyingStream(); } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java index 8a5c88b26d..0ddc9455d3 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ClobProxy.java @@ -33,11 +33,12 @@ import java.lang.reflect.Proxy; import java.sql.Clob; import java.sql.SQLException; +import org.hibernate.engine.jdbc.internal.CharacterStreamImpl; import org.hibernate.type.descriptor.java.DataHelper; /** * Manages aspects of proxying {@link Clob Clobs} for non-contextual creation, including proxy creation and - * handling proxy invocations. + * handling proxy invocations. We use proxies here solely to avoid JDBC version incompatibilities. * * @author Gavin King * @author Steve Ebersole @@ -46,12 +47,9 @@ import org.hibernate.type.descriptor.java.DataHelper; public class ClobProxy implements InvocationHandler { private static final Class[] PROXY_INTERFACES = new Class[] { Clob.class, ClobImplementer.class }; - private String string; - private Reader reader; - private long length; + private final CharacterStream characterStream; private boolean needsReset = false; - /** * Constructor used to build {@link Clob} from string data. 
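The CLOB side mirrors this through the reworked CharacterStream contract; a sketch under the same assumptions:

    import java.sql.Clob;

    import org.hibernate.engine.jdbc.CharacterStream;
    import org.hibernate.engine.jdbc.ClobImplementer;
    import org.hibernate.engine.jdbc.NonContextualLobCreator;

    public class ClobDataSketch {
        public static void main(String[] args) {
            Clob clob = NonContextualLobCreator.INSTANCE.createClob( "hello" );
            // proxies generated by ClobProxy also implement ClobImplementer
            CharacterStream chars = ( (ClobImplementer) clob ).getUnderlyingStream();
            System.out.println( chars.getLength() );   // 5
            System.out.println( chars.asString() );    // hello
            chars.release();
        }
    }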
* @@ -59,9 +57,7 @@ public class ClobProxy implements InvocationHandler { * @see #generateProxy(String) */ protected ClobProxy(String string) { - this.string = string; - reader = new StringReader(string); - length = string.length(); + this.characterStream = new CharacterStreamImpl( string ); } /** @@ -72,28 +68,25 @@ public class ClobProxy implements InvocationHandler { * @see #generateProxy(java.io.Reader, long) */ protected ClobProxy(Reader reader, long length) { - this.reader = reader; - this.length = length; + this.characterStream = new CharacterStreamImpl( reader, length ); } protected long getLength() { - return length; + return characterStream.getLength(); } protected InputStream getAsciiStream() throws SQLException { resetIfNeeded(); - return new ReaderInputStream( reader ); + return new ReaderInputStream( characterStream.asReader() ); } protected Reader getCharacterStream() throws SQLException { resetIfNeeded(); - return reader; + return characterStream.asReader(); } protected String getSubString(long start, int length) { - if ( string == null ) { - throw new UnsupportedOperationException( "Clob was not created from string; cannot substring" ); - } + final String string = characterStream.asString(); // semi-naive implementation int endIndex = Math.min( ((int)start)+length, string.length() ); return string.substring( (int)start, endIndex ); @@ -105,6 +98,7 @@ public class ClobProxy implements InvocationHandler { * @throws UnsupportedOperationException if any methods other than {@link Clob#length()}, * {@link Clob#getAsciiStream()}, or {@link Clob#getCharacterStream()} are invoked. */ + @Override @SuppressWarnings({ "UnnecessaryBoxing" }) public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { final String methodName = method.getName(); @@ -113,6 +107,9 @@ public class ClobProxy implements InvocationHandler { if ( "length".equals( methodName ) && argCount == 0 ) { return Long.valueOf( getLength() ); } + if ( "getUnderlyingStream".equals( methodName ) ) { + return characterStream; + } if ( "getAsciiStream".equals( methodName ) && argCount == 0 ) { return getAsciiStream(); } @@ -152,7 +149,7 @@ public class ClobProxy implements InvocationHandler { return getSubString( start-1, length ); } if ( "free".equals( methodName ) && argCount == 0 ) { - reader.close(); + characterStream.release(); return null; } if ( "toString".equals( methodName ) && argCount == 0 ) { @@ -171,7 +168,7 @@ public class ClobProxy implements InvocationHandler { protected void resetIfNeeded() throws SQLException { try { if ( needsReset ) { - reader.reset(); + characterStream.asReader().reset(); } } catch ( IOException ioe ) { diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java index ae27d712f5..3ed10eae56 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ContextualLobCreator.java @@ -59,9 +59,7 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat return lobCreationContext.execute( CREATE_BLOB_CALLBACK ); } - /** - * {@inheritDoc} - */ + @Override public Blob createBlob(byte[] bytes) { try { Blob blob = createBlob(); @@ -73,25 +71,11 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat } } - /** - * {@inheritDoc} - */ + @Override public Blob createBlob(InputStream inputStream, long length) { - try { - 
Blob blob = createBlob(); - OutputStream byteStream = blob.setBinaryStream( 1 ); - StreamUtils.copy( inputStream, byteStream ); - byteStream.flush(); - byteStream.close(); - // todo : validate length written versus length given? - return blob; - } - catch ( SQLException e ) { - throw new JDBCException( "Unable to prepare BLOB binary stream for writing",e ); - } - catch ( IOException e ) { - throw new HibernateException( "Unable to write stream contents to BLOB", e ); - } + // IMPL NOTE : it is inefficient to use JDBC LOB locator creation to create a LOB + // backed by a given stream. So just wrap the stream (which is what the NonContextualLobCreator does). + return NonContextualLobCreator.INSTANCE.createBlob( inputStream, length ); } /** @@ -103,9 +87,7 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat return lobCreationContext.execute( CREATE_CLOB_CALLBACK ); } - /** - * {@inheritDoc} - */ + @Override public Clob createClob(String string) { try { Clob clob = createClob(); @@ -117,24 +99,11 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat } } - /** - * {@inheritDoc} - */ + @Override public Clob createClob(Reader reader, long length) { - try { - Clob clob = createClob(); - Writer writer = clob.setCharacterStream( 1 ); - StreamUtils.copy( reader, writer ); - writer.flush(); - writer.close(); - return clob; - } - catch ( SQLException e ) { - throw new JDBCException( "Unable to prepare CLOB stream for writing", e ); - } - catch ( IOException e ) { - throw new HibernateException( "Unable to write CLOB stream content", e ); - } + // IMPL NOTE : it is inefficient to use JDBC LOB locator creation to create a LOB + // backed by a given stream. So just wrap the stream (which is what the NonContextualLobCreator does). + return NonContextualLobCreator.INSTANCE.createClob( reader, length ); } /** @@ -146,9 +115,7 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat return lobCreationContext.execute( CREATE_NCLOB_CALLBACK ); } - /** - * {@inheritDoc} - */ + @Override public NClob createNClob(String string) { try { NClob nclob = createNClob(); @@ -160,24 +127,11 @@ public class ContextualLobCreator extends AbstractLobCreator implements LobCreat } } - /** - * {@inheritDoc} - */ + @Override public NClob createNClob(Reader reader, long length) { - try { - NClob nclob = createNClob(); - Writer writer = nclob.setCharacterStream( 1 ); - StreamUtils.copy( reader, writer ); - writer.flush(); - writer.close(); - return nclob; - } - catch ( SQLException e ) { - throw new JDBCException( "Unable to prepare NCLOB stream for writing", e ); - } - catch ( IOException e ) { - throw new HibernateException( "Unable to write NCLOB stream content", e ); - } + // IMPL NOTE : it is inefficient to use JDBC LOB locator creation to create a LOB + // backed by a given stream. So just wrap the stream (which is what the NonContextualLobCreator does). 
+ return NonContextualLobCreator.INSTANCE.createNClob( reader, length ); } public static final LobCreationContext.Callback CREATE_BLOB_CALLBACK = new LobCreationContext.Callback() { diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java index 74a8d72c7a..b1f6d39628 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java @@ -30,8 +30,6 @@ import java.sql.NClob; /** * Contract for creating various LOB references. - * - * @todo LobCreator really needs to be an api since we expose it to users. * * @author Steve Ebersole * @author Gail Badner diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java index cbd7ac3a15..715f57e767 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobProxy.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.io.Reader; import java.lang.reflect.Proxy; import java.sql.Clob; @@ -29,10 +30,10 @@ import java.sql.NClob; /** * Manages aspects of proxying java.sql.NClobs for non-contextual creation, including proxy creation and - * handling proxy invocations. + * handling proxy invocations. We use proxies here solely to avoid JDBC version incompatibilities. *

- * Generated proxies are typed as {@link java.sql.Clob} (java.sql.NClob extends {@link java.sql.Clob}) and in JDK 1.6 environments, they - * are also typed to java.sql.NClob + * Generated proxies are typed as {@link java.sql.Clob} (java.sql.NClob extends {@link java.sql.Clob}) + * and in JDK 1.6+ environments, they are also typed to java.sql.NClob * * @author Steve Ebersole */ diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java index 8b370e48a6..93b5ec9db0 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/NonContextualLobCreator.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.io.InputStream; import java.io.Reader; import java.sql.Blob; @@ -41,44 +42,32 @@ public class NonContextualLobCreator extends AbstractLobCreator implements LobCr private NonContextualLobCreator() { } - /** - * {@inheritDoc} - */ + @Override public Blob createBlob(byte[] bytes) { return BlobProxy.generateProxy( bytes ); } - /** - * {@inheritDoc} - */ + @Override public Blob createBlob(InputStream stream, long length) { return BlobProxy.generateProxy( stream, length ); } - /** - * {@inheritDoc} - */ + @Override public Clob createClob(String string) { return ClobProxy.generateProxy( string ); } - /** - * {@inheritDoc} - */ + @Override public Clob createClob(Reader reader, long length) { return ClobProxy.generateProxy( reader, length ); } - /** - * {@inheritDoc} - */ + @Override public NClob createNClob(String string) { return NClobProxy.generateProxy( string ); } - /** - * {@inheritDoc} - */ + @Override public NClob createNClob(Reader reader, long length) { return NClobProxy.generateProxy( reader, length ); } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java index 7728bd4bae..4c2fb9e248 100755 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/ReaderInputStream.java @@ -1,10 +1,10 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * - * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as + * Copyright (c) 2008, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Middleware LLC. + * distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU @@ -20,7 +20,6 @@ * Free Software Foundation, Inc. 
* 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA - * */ package org.hibernate.engine.jdbc; import java.io.IOException; @@ -42,5 +41,4 @@ public class ReaderInputStream extends InputStream { public int read() throws IOException { return reader.read(); } - } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java index 24d4b9f15b..c7f16de4a7 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableBlobProxy.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.io.Serializable; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; @@ -62,9 +63,7 @@ public class SerializableBlobProxy implements InvocationHandler, Serializable { } } - /** - * {@inheritDoc} - */ + @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if ( "getWrappedBlob".equals( method.getName() ) ) { return getWrappedBlob(); diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java index 1d53e6bbd4..a092ff710a 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableClobProxy.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.io.Serializable; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; @@ -62,9 +63,7 @@ public class SerializableClobProxy implements InvocationHandler, Serializable { } } - /** - * {@inheritDoc} - */ + @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if ( "getWrappedClob".equals( method.getName() ) ) { return getWrappedClob(); diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java index 954bb80d64..2314f92d3a 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/SerializableNClobProxy.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.lang.reflect.Proxy; import java.sql.Clob; diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java index dad827d309..205030b03e 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/StreamUtils.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java index 7b3e282728..0885e46697 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedBlob.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package 
org.hibernate.engine.jdbc; + import java.sql.Blob; /** diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java index 8dfb72f78d..97f621d78b 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/WrappedClob.java @@ -22,6 +22,7 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.engine.jdbc; + import java.sql.Clob; /** diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java index 3b9d67da7b..6fd2f82c20 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/connections/internal/DriverManagerConnectionProviderImpl.java @@ -29,6 +29,7 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Map; import java.util.Properties; +import java.util.concurrent.atomic.AtomicInteger; import org.jboss.logging.Logger; @@ -68,7 +69,7 @@ public class DriverManagerConnectionProviderImpl private boolean autocommit; private final ArrayList pool = new ArrayList(); - private int checkedOut = 0; + private final AtomicInteger checkedOut = new AtomicInteger(); private boolean stopped; @@ -93,7 +94,7 @@ public class DriverManagerConnectionProviderImpl } public void configure(Map configurationValues) { - LOG.usingHibernateBuiltInConnectionPool(); + LOG.usingHibernateBuiltInConnectionPool(); String driverClassName = (String) configurationValues.get( AvailableSettings.DRIVER ); if ( driverClassName == null ) { @@ -127,18 +128,19 @@ public class DriverManagerConnectionProviderImpl } poolSize = ConfigurationHelper.getInt( AvailableSettings.POOL_SIZE, configurationValues, 20 ); // default pool size 20 - LOG.hibernateConnectionPoolSize(poolSize); + LOG.hibernateConnectionPoolSize( poolSize ); autocommit = ConfigurationHelper.getBoolean( AvailableSettings.AUTOCOMMIT, configurationValues ); - LOG.autoCommitMode( autocommit ); + LOG.autoCommitMode( autocommit ); isolation = ConfigurationHelper.getInteger( AvailableSettings.ISOLATION, configurationValues ); - if (isolation != null) LOG.jdbcIsolationLevel(Environment.isolationLevelToString(isolation.intValue())); + if ( isolation != null ) + LOG.jdbcIsolationLevel( Environment.isolationLevelToString( isolation.intValue() ) ); url = (String) configurationValues.get( AvailableSettings.URL ); if ( url == null ) { - String msg = LOG.jdbcUrlNotSpecified(AvailableSettings.URL); - LOG.error(msg); + String msg = LOG.jdbcUrlNotSpecified( AvailableSettings.URL ); + LOG.error( msg ); throw new HibernateException( msg ); } @@ -168,13 +170,14 @@ public class DriverManagerConnectionProviderImpl } public Connection getConnection() throws SQLException { - LOG.tracev( "Total checked-out connections: {0}", checkedOut ); + final boolean traceEnabled = LOG.isTraceEnabled(); + if ( traceEnabled ) LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() ); // essentially, if we have available connections in the pool, use one... 
synchronized (pool) { if ( !pool.isEmpty() ) { int last = pool.size() - 1; - LOG.tracev( "Using pooled JDBC connection, pool size: {0}", last ); + if ( traceEnabled ) LOG.tracev( "Using pooled JDBC connection, pool size: {0}", last ); Connection pooled = pool.remove( last ); if ( isolation != null ) { pooled.setTransactionIsolation( isolation.intValue() ); @@ -182,14 +185,16 @@ public class DriverManagerConnectionProviderImpl if ( pooled.getAutoCommit() != autocommit ) { pooled.setAutoCommit( autocommit ); } - checkedOut++; + checkedOut.incrementAndGet(); return pooled; } } // otherwise we open a new connection... - LOG.debug( "Opening new JDBC connection" ); + final boolean debugEnabled = LOG.isDebugEnabled(); + if ( debugEnabled ) LOG.debug( "Opening new JDBC connection" ); + Connection conn = DriverManager.getConnection( url, connectionProps ); if ( isolation != null ) { conn.setTransactionIsolation( isolation.intValue() ); @@ -198,23 +203,24 @@ public class DriverManagerConnectionProviderImpl conn.setAutoCommit(autocommit); } - if ( LOG.isDebugEnabled() ) { + if ( debugEnabled ) { LOG.debugf( "Created connection to: %s, Isolation Level: %s", url, conn.getTransactionIsolation() ); } - checkedOut++; + checkedOut.incrementAndGet(); return conn; } public void closeConnection(Connection conn) throws SQLException { - checkedOut--; + checkedOut.decrementAndGet(); + final boolean traceEnabled = LOG.isTraceEnabled(); // add to the pool if the max size is not yet reached. - synchronized (pool) { + synchronized ( pool ) { int currentSize = pool.size(); if ( currentSize < poolSize ) { - LOG.tracev( "Returning connection to pool, pool size: {0}", ( currentSize + 1 ) ); - pool.add(conn); + if ( traceEnabled ) LOG.tracev( "Returning connection to pool, pool size: {0}", ( currentSize + 1 ) ); + pool.add( conn ); return; } } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java index f26633f1ea..415d0b5462 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolver.java @@ -63,7 +63,7 @@ import org.hibernate.internal.CoreMessageLogger; public class StandardDialectResolver extends AbstractDialectResolver { private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, - StandardDialectResolver.class.getName()); + StandardDialectResolver.class.getName()); @Override protected Dialect resolveDialectInternal(DatabaseMetaData metaData) throws SQLException { @@ -88,7 +88,7 @@ public class StandardDialectResolver extends AbstractDialectResolver { if ( "PostgreSQL".equals( databaseName ) ) { final int databaseMinorVersion = metaData.getDatabaseMinorVersion(); - if (databaseMajorVersion >= 8 && databaseMinorVersion >= 2) { + if ( databaseMajorVersion > 8 || ( databaseMajorVersion == 8 && databaseMinorVersion >= 2 ) ) { return new PostgreSQL82Dialect(); } return new PostgreSQL81Dialect(); @@ -111,7 +111,7 @@ public class StandardDialectResolver extends AbstractDialectResolver { } if ( "ingres".equalsIgnoreCase( databaseName ) ) { - switch( databaseMajorVersion ) { + switch ( databaseMajorVersion ) { case 9: int databaseMinorVersion = metaData.getDatabaseMinorVersion(); if (databaseMinorVersion > 2) { @@ -133,6 +133,7 @@ public class StandardDialectResolver extends 
AbstractDialectResolver { case 9: return new SQLServer2005Dialect(); case 10: + case 11: return new SQLServer2008Dialect(); default: LOG.unknownSqlServerVersion(databaseMajorVersion); diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BinaryStreamImpl.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/BinaryStreamImpl.java similarity index 86% rename from hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BinaryStreamImpl.java rename to hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/BinaryStreamImpl.java index 078c0b25ad..b687105af4 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BinaryStreamImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/BinaryStreamImpl.java @@ -21,12 +21,13 @@ * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ -package org.hibernate.type.descriptor.java; +package org.hibernate.engine.jdbc.internal; import java.io.ByteArrayInputStream; +import java.io.IOException; import java.io.InputStream; -import org.hibernate.type.descriptor.BinaryStream; +import org.hibernate.engine.jdbc.BinaryStream; /** * Implementation of {@link BinaryStream} @@ -50,7 +51,16 @@ public class BinaryStreamImpl extends ByteArrayInputStream implements BinaryStre return buf; } - public int getLength() { + public long getLength() { return length; } + + @Override + public void release() { + try { + super.close(); + } + catch (IOException ignore) { + } + } } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/CharacterStreamImpl.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/CharacterStreamImpl.java new file mode 100644 index 0000000000..d61e07a4c5 --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/CharacterStreamImpl.java @@ -0,0 +1,87 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2010, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.engine.jdbc.internal; + +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.io.StringReader; + +import org.hibernate.engine.jdbc.CharacterStream; +import org.hibernate.type.descriptor.java.DataHelper; + +/** + * Implementation of {@link CharacterStream} + * + * @author Steve Ebersole + */ +public class CharacterStreamImpl implements CharacterStream { + private final long length; + + private Reader reader; + private String string; + + public CharacterStreamImpl(String chars) { + this.string = chars; + this.length = chars.length(); + } + + public CharacterStreamImpl(Reader reader, long length) { + this.reader = reader; + this.length = length; + } + + @Override + public Reader asReader() { + if ( reader == null ) { + reader = new StringReader( string ); + } + return reader; + } + + @Override + public String asString() { + if ( string == null ) { + string = DataHelper.extractString( reader ); + } + return string; + } + + @Override + public long getLength() { + return length; + } + + @Override + public void release() { + if ( reader == null ) { + return; + } + try { + reader.close(); + } + catch (IOException ignore) { + } + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java index 421a64422c..b1824d7e47 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/CollectionLoadContext.java @@ -31,8 +31,6 @@ import java.util.Iterator; import java.util.List; import java.util.Set; -import org.jboss.logging.Logger; - import org.hibernate.CacheMode; import org.hibernate.EntityMode; import org.hibernate.HibernateException; @@ -48,6 +46,7 @@ import org.hibernate.engine.spi.Status; import org.hibernate.internal.CoreMessageLogger; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.pretty.MessageHelper; +import org.jboss.logging.Logger; /** * Represents state associated with the processing of a given {@link ResultSet} @@ -253,7 +252,12 @@ public class CollectionLoadContext { } else { ce.postInitialize( lce.getCollection() ); +// if (ce.getLoadedPersister().getBatchSize() > 1) { // not the best place for doing this, moved into ce.postInitialize +// getLoadContext().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(ce); +// } } + + boolean addToCache = hasNoQueuedAdds && // there were no queued additions persister.hasCache() && // and the role has a cache @@ -266,7 +270,7 @@ public class CollectionLoadContext { if ( LOG.isDebugEnabled() ) { LOG.debugf( "Collection fully initialized: %s", - MessageHelper.collectionInfoString(persister, lce.getKey(), session.getFactory()) + MessageHelper.collectionInfoString(persister, lce.getCollection(), lce.getKey(), session) ); } if ( session.getFactory().getStatistics().isStatisticsEnabled() ) { @@ -285,7 +289,7 @@ public class CollectionLoadContext { final SessionFactoryImplementor factory = session.getFactory(); if ( LOG.isDebugEnabled() ) { - LOG.debugf( "Caching collection: %s", MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) ); + LOG.debugf( "Caching collection: %s", MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session ) ); } if ( 
!session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) { @@ -318,7 +322,7 @@ public class CollectionLoadContext { if ( collectionOwner == null ) { throw new HibernateException( "Unable to resolve owner of loading collection [" + - MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) + + MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session ) + "] for second level caching" ); } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java index ba56e22870..5a6e4a6cf2 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/loading/internal/LoadContexts.java @@ -201,11 +201,6 @@ public class LoadContexts { } return lce.getCollection(); } - // TODO : should really move this log statement to CollectionType, where this is used from... - if ( LOG.isTraceEnabled() ) { - LOG.tracef( "Creating collection wrapper: %s", - MessageHelper.collectionInfoString( persister, ownerKey, getSession().getFactory() ) ); - } return null; } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java index 0afe801f3d..37d5c8f7d5 100755 --- a/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/BatchFetchQueue.java @@ -25,15 +25,17 @@ package org.hibernate.engine.spi; import java.io.Serializable; import java.util.HashMap; -import java.util.Iterator; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.Map; +import java.util.Map.Entry; + +import org.jboss.logging.Logger; import org.hibernate.EntityMode; import org.hibernate.cache.spi.CacheKey; import org.hibernate.collection.spi.PersistentCollection; -import org.hibernate.internal.util.MarkerObject; -import org.hibernate.internal.util.collections.IdentityMap; +import org.hibernate.internal.CoreMessageLogger; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.entity.EntityPersister; @@ -43,33 +45,35 @@ import org.hibernate.persister.entity.EntityPersister; * can be re-used as a subquery for loading owned collections. * * @author Gavin King + * @author Steve Ebersole + * @author Guenther Demetz */ public class BatchFetchQueue { + private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, BatchFetchQueue.class.getName() ); - public static final Object MARKER = new MarkerObject( "MARKER" ); - - /** - * Defines a sequence of {@link EntityKey} elements that are currently - * elegible for batch-fetching. - *

- * Even though this is a map, we only use the keys. A map was chosen in - * order to utilize a {@link LinkedHashMap} to maintain sequencing - * as well as uniqueness. - *

- * TODO : this would be better as a SequencedReferenceSet, but no such beast exists! - */ - private final Map batchLoadableEntityKeys = new LinkedHashMap(8); + private final PersistenceContext context; /** * A map of {@link SubselectFetch subselect-fetch descriptors} keyed by the * {@link EntityKey) against which the descriptor is registered. */ - private final Map subselectsByEntityKey = new HashMap(8); + private final Map subselectsByEntityKey = new HashMap(8); /** - * The owning persistence context. + * Used to hold information about the entities that are currently eligible for batch-fetching. Ultimately + * used by {@link #getEntityBatch} to build entity load batches. + *

+ * A Map structure is used to segment the keys by entity type since loading can only be done for a particular entity + * type at a time. */ - private final PersistenceContext context; + private final Map > batchLoadableEntityKeys = new HashMap >(8); + + /** + * Used to hold information about the collections that are currently eligible for batch-fetching. Ultimately + * used by {@link #getCollectionBatch} to build collection load batches. + */ + private final Map> batchLoadableCollections = + new HashMap>(8); /** * Constructs a queue for the given context. @@ -85,9 +89,13 @@ public class BatchFetchQueue { */ public void clear() { batchLoadableEntityKeys.clear(); + batchLoadableCollections.clear(); subselectsByEntityKey.clear(); } + + // sub-select support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + /** * Retrieve the fetch descriptor associated with the given entity key. * @@ -96,7 +104,7 @@ public class BatchFetchQueue { * this entity key. */ public SubselectFetch getSubselect(EntityKey key) { - return (SubselectFetch) subselectsByEntityKey.get(key); + return subselectsByEntityKey.get( key ); } /** @@ -106,7 +114,7 @@ public class BatchFetchQueue { * @param subquery The fetch descriptor. */ public void addSubselect(EntityKey key, SubselectFetch subquery) { - subselectsByEntityKey.put(key, subquery); + subselectsByEntityKey.put( key, subquery ); } /** @@ -116,7 +124,7 @@ public class BatchFetchQueue { * need to load its collections) */ public void removeSubselect(EntityKey key) { - subselectsByEntityKey.remove(key); + subselectsByEntityKey.remove( key ); } /** @@ -128,6 +136,9 @@ public class BatchFetchQueue { subselectsByEntityKey.clear(); } + + // entity batch support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + /** * If an EntityKey represents a batch loadable entity, add * it to the queue. @@ -140,9 +151,15 @@ public class BatchFetchQueue { */ public void addBatchLoadableEntityKey(EntityKey key) { if ( key.isBatchLoadable() ) { - batchLoadableEntityKeys.put( key, MARKER ); + LinkedHashSet set = batchLoadableEntityKeys.get( key.getEntityName()); + if (set == null) { + set = new LinkedHashSet(8); + batchLoadableEntityKeys.put( key.getEntityName(), set); + } + set.add(key); } } + /** * After evicting or deleting or loading an entity, we don't @@ -150,69 +167,12 @@ public class BatchFetchQueue { * if necessary */ public void removeBatchLoadableEntityKey(EntityKey key) { - if ( key.isBatchLoadable() ) batchLoadableEntityKeys.remove(key); - } - - /** - * Get a batch of uninitialized collection keys for a given role - * - * @param collectionPersister The persister for the collection role. - * @param id A key that must be included in the batch fetch - * @param batchSize the maximum number of keys to return - * @return an array of collection keys, of length batchSize (padded with nulls) - */ - public Serializable[] getCollectionBatch( - final CollectionPersister collectionPersister, - final Serializable id, - final int batchSize) { - Serializable[] keys = new Serializable[batchSize]; - keys[0] = id; - int i = 1; - //int count = 0; - int end = -1; - boolean checkForEnd = false; - // this only works because collection entries are kept in a sequenced - // map by persistence context (maybe we should do like entities and - // keep a separate sequences set...) 
- - for ( Map.Entry me : - IdentityMap.concurrentEntries( (Map) context.getCollectionEntries() )) { - - CollectionEntry ce = me.getValue(); - PersistentCollection collection = me.getKey(); - if ( !collection.wasInitialized() && ce.getLoadedPersister() == collectionPersister ) { - - if ( checkForEnd && i == end ) { - return keys; //the first key found after the given key - } - - //if ( end == -1 && count > batchSize*10 ) return keys; //try out ten batches, max - - final boolean isEqual = collectionPersister.getKeyType().isEqual( - id, - ce.getLoadedKey(), - collectionPersister.getFactory() - ); - - if ( isEqual ) { - end = i; - //checkForEnd = false; - } - else if ( !isCached( ce.getLoadedKey(), collectionPersister ) ) { - keys[i++] = ce.getLoadedKey(); - //count++; - } - - if ( i == batchSize ) { - i = 1; //end of array, start filling again from start - if ( end != -1 ) { - checkForEnd = true; - } - } + if ( key.isBatchLoadable() ) { + LinkedHashSet set = batchLoadableEntityKeys.get( key.getEntityName()); + if (set != null) { + set.remove(key); } - } - return keys; //we ran out of keys to try } /** @@ -236,10 +196,11 @@ public class BatchFetchQueue { int end = -1; boolean checkForEnd = false; - Iterator iter = batchLoadableEntityKeys.keySet().iterator(); - while ( iter.hasNext() ) { - EntityKey key = (EntityKey) iter.next(); - if ( key.getEntityName().equals( persister.getEntityName() ) ) { //TODO: this needn't exclude subclasses... + // TODO: this needn't exclude subclasses... + + LinkedHashSet set = batchLoadableEntityKeys.get( persister.getEntityName() ); + if ( set != null ) { + for ( EntityKey key : set ) { if ( checkForEnd && i == end ) { //the first id found after the given id return ids; @@ -253,8 +214,10 @@ public class BatchFetchQueue { } } if ( i == batchSize ) { - i = 1; //end of array, start filling again from start - if (end!=-1) checkForEnd = true; + i = 1; // end of array, start filling again from start + if ( end != -1 ) { + checkForEnd = true; + } } } } @@ -272,6 +235,98 @@ public class BatchFetchQueue { } return false; } + + + // collection batch support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * If an CollectionEntry represents a batch loadable collection, add + * it to the queue. + */ + public void addBatchLoadableCollection(PersistentCollection collection, CollectionEntry ce) { + final CollectionPersister persister = ce.getLoadedPersister(); + + LinkedHashMap map = batchLoadableCollections.get( persister.getRole() ); + if ( map == null ) { + map = new LinkedHashMap( 16 ); + batchLoadableCollections.put( persister.getRole(), map ); + } + map.put( ce, collection ); + } + + /** + * After a collection was initialized or evicted, we don't + * need to batch fetch it anymore, remove it from the queue + * if necessary + */ + public void removeBatchLoadableCollection(CollectionEntry ce) { + LinkedHashMap map = batchLoadableCollections.get( ce.getLoadedPersister().getRole() ); + if ( map != null ) { + map.remove( ce ); + } + } + + /** + * Get a batch of uninitialized collection keys for a given role + * + * @param collectionPersister The persister for the collection role. 
+ * @param id A key that must be included in the batch fetch + * @param batchSize the maximum number of keys to return + * @return an array of collection keys, of length batchSize (padded with nulls) + */ + public Serializable[] getCollectionBatch( + final CollectionPersister collectionPersister, + final Serializable id, + final int batchSize) { + + Serializable[] keys = new Serializable[batchSize]; + keys[0] = id; + + int i = 1; + int end = -1; + boolean checkForEnd = false; + + final LinkedHashMap map = batchLoadableCollections.get( collectionPersister.getRole() ); + if ( map != null ) { + for ( Entry me : map.entrySet() ) { + final CollectionEntry ce = me.getKey(); + final PersistentCollection collection = me.getValue(); + + if ( collection.wasInitialized() ) { + // should never happen + LOG.warn( "Encountered initialized collection in BatchFetchQueue, this should not happen." ); + continue; + } + + if ( checkForEnd && i == end ) { + return keys; //the first key found after the given key + } + + final boolean isEqual = collectionPersister.getKeyType().isEqual( + id, + ce.getLoadedKey(), + collectionPersister.getFactory() + ); + + if ( isEqual ) { + end = i; + //checkForEnd = false; + } + else if ( !isCached( ce.getLoadedKey(), collectionPersister ) ) { + keys[i++] = ce.getLoadedKey(); + //count++; + } + + if ( i == batchSize ) { + i = 1; //end of array, start filling again from start + if ( end != -1 ) { + checkForEnd = true; + } + } + } + } + return keys; //we ran out of keys to try + } private boolean isCached(Serializable collectionKey, CollectionPersister persister) { if ( persister.hasCache() ) { diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java index 1edb372459..fcd269acba 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/CollectionEntry.java @@ -34,6 +34,7 @@ import org.jboss.logging.Logger; import org.hibernate.AssertionFailure; import org.hibernate.HibernateException; import org.hibernate.MappingException; +import org.hibernate.collection.internal.AbstractPersistentCollection; import org.hibernate.collection.spi.PersistentCollection; import org.hibernate.internal.CoreMessageLogger; import org.hibernate.persister.collection.CollectionPersister; @@ -215,6 +216,9 @@ public final class CollectionEntry implements Serializable { collection.getSnapshot( getLoadedPersister() ) : null; collection.setSnapshot(loadedKey, role, snapshot); + if (getLoadedPersister().getBatchSize() > 1) { + ((AbstractPersistentCollection) collection).getSession().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(this); + } } /** @@ -260,6 +264,20 @@ public final class CollectionEntry implements Serializable { return snapshot; } + /** + * Reset the stored snapshot for both the persistent collection and this collection entry. + * Used during the merge of detached collections. + * + * @param collection the persistentcollection to be updated + * @param storedSnapshot the new stored snapshot + */ + public void resetStoredSnapshot(PersistentCollection collection, Serializable storedSnapshot) { + LOG.debugf("Reset storedSnapshot to %s for %s", storedSnapshot, this); + + snapshot = storedSnapshot; + collection.setSnapshot(loadedKey, role, snapshot); + } + private void setLoadedPersister(CollectionPersister persister) { loadedPersister = persister; setRole( persister == null ? 
null : persister.getRole() ); @@ -418,4 +436,4 @@ public final class CollectionEntry implements Serializable { ( session == null ? null : session.getFactory() ) ); } -} \ No newline at end of file +} diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java index e245d80695..05ee64c067 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/EntityEntry.java @@ -270,8 +270,14 @@ public final class EntityEntry implements Serializable { } public Object getLoadedValue(String propertyName) { - int propertyIndex = ( (UniqueKeyLoadable) persister ).getPropertyIndex(propertyName); - return loadedState[propertyIndex]; + if ( loadedState == null ) { + return null; + } + else { + int propertyIndex = ( (UniqueKeyLoadable) persister ) + .getPropertyIndex( propertyName ); + return loadedState[propertyIndex]; + } } /** diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java index 66f2643eb4..c5659ad956 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/PersistenceContext.java @@ -830,6 +830,15 @@ public interface PersistenceContext { * of old values as no longer valid. */ public void cleanupFromSynchronizations(); + + /** + * Called on {@link org.hibernate.Session#evict} to give a chance to clean up natural-id cross refs. + * + * @param object The entity instance. + * @param persister The entity persister + * @param identifier The entity identifier + */ + public void handleEviction(Object object, EntityPersister persister, Serializable identifier); } /** diff --git a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java index 8bf320502b..1c58989865 100644 --- a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java +++ b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultEvictEventListener.java @@ -80,7 +80,7 @@ public class DefaultEvictEventListener implements EvictEventListener { if ( !li.isUninitialized() ) { final Object entity = persistenceContext.removeEntity( key ); if ( entity != null ) { - EntityEntry e = event.getSession().getPersistenceContext().removeEntry( entity ); + EntityEntry e = persistenceContext.removeEntry( entity ); doEvict( entity, key, e.getPersister(), event.getSession() ); } } @@ -106,6 +106,10 @@ public class DefaultEvictEventListener implements EvictEventListener { LOG.tracev( "Evicting {0}", MessageHelper.infoString( persister ) ); } + if ( persister.hasNaturalIdentifier() ) { + session.getPersistenceContext().getNaturalIdHelper().handleEviction( object, persister, key.getIdentifier() ); + } + // remove all collections for the entity from the session-level cache if ( persister.hasCollections() ) { new EvictVisitor( session ).process( object, persister ); diff --git a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java index 69ce7ce476..033682d5df 100755 --- 
a/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java +++ b/hibernate-core/src/main/java/org/hibernate/event/internal/DefaultInitializeCollectionEventListener.java @@ -25,8 +25,6 @@ package org.hibernate.event.internal; import java.io.Serializable; -import org.jboss.logging.Logger; - import org.hibernate.HibernateException; import org.hibernate.cache.spi.CacheKey; import org.hibernate.cache.spi.entry.CollectionCacheEntry; @@ -40,6 +38,7 @@ import org.hibernate.event.spi.InitializeCollectionEventListener; import org.hibernate.internal.CoreMessageLogger; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.pretty.MessageHelper; +import org.jboss.logging.Logger; /** * @author Gavin King @@ -62,8 +61,7 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle if ( !collection.wasInitialized() ) { if ( LOG.isTraceEnabled() ) { LOG.tracev( "Initializing collection {0}", - MessageHelper.collectionInfoString( ce.getLoadedPersister(), ce.getLoadedKey(), - source.getFactory() ) ); + MessageHelper.collectionInfoString( ce.getLoadedPersister(), collection, ce.getLoadedKey(), source ) ); } LOG.trace( "Checking second-level cache" ); diff --git a/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java b/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java index dcc18fa019..e05487ed1a 100644 --- a/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java +++ b/hibernate-core/src/main/java/org/hibernate/event/internal/EvictVisitor.java @@ -79,8 +79,12 @@ public class EvictVisitor extends AbstractVisitor { if ( LOG.isDebugEnabled() ) { LOG.debugf( "Evicting collection: %s", MessageHelper.collectionInfoString( ce.getLoadedPersister(), + collection, ce.getLoadedKey(), - getSession().getFactory() ) ); + getSession() ) ); + } + if (ce.getLoadedPersister() != null && ce.getLoadedPersister().getBatchSize() > 1) { + getSession().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(ce); } if ( ce.getLoadedPersister() != null && ce.getLoadedKey() != null ) { //TODO: is this 100% correct? diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java index 66403b0adf..b9f0bbd8e5 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java +++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/HqlParser.java @@ -383,11 +383,15 @@ public final class HqlParser extends HqlBaseParser { @Override public void processMemberOf(Token n, AST p, ASTPair currentAST) { - AST inAst = n == null ? astFactory.create( IN, "in" ) : astFactory.create( NOT_IN, "not in" ); - astFactory.makeASTRoot( currentAST, inAst ); - AST ast = createSubquery( p ); - ast = ASTUtil.createParent( astFactory, IN_LIST, "inList", ast ); - inAst.addChild( ast ); + // convert MEMBER OF to the equivalent IN ELEMENTS structure... + AST inNode = n == null ? 
astFactory.create( IN, "in" ) : astFactory.create( NOT_IN, "not in" ); + astFactory.makeASTRoot( currentAST, inNode ); + + AST inListNode = astFactory.create( IN_LIST, "inList" ); + inNode.addChild( inListNode ); + AST elementsNode = astFactory.create( ELEMENTS, "elements" ); + inListNode.addChild( elementsNode ); + elementsNode.addChild( p ); } static public void panic() { diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java index 04b7feb2e0..2c2aeaf164 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java +++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/BasicExecutor.java @@ -31,9 +31,12 @@ import java.util.List; import antlr.RecognitionException; import org.hibernate.HibernateException; +import org.hibernate.action.internal.BulkOperationCleanupAction; import org.hibernate.engine.spi.QueryParameters; import org.hibernate.engine.spi.RowSelection; +import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.event.spi.EventSource; import org.hibernate.hql.internal.ast.HqlSqlWalker; import org.hibernate.hql.internal.ast.QuerySyntaxException; import org.hibernate.hql.internal.ast.SqlGenerator; @@ -45,17 +48,17 @@ import org.hibernate.persister.entity.Queryable; * * @author Steve Ebersole */ -public class BasicExecutor extends AbstractStatementExecutor { - +public class BasicExecutor implements StatementExecutor { + private final SessionFactoryImplementor factory; private final Queryable persister; private final String sql; private final List parameterSpecifications; public BasicExecutor(HqlSqlWalker walker, Queryable persister) { - super(walker, null); + this.factory = walker.getSessionFactoryHelper().getFactory(); this.persister = persister; try { - SqlGenerator gen = new SqlGenerator( getFactory() ); + SqlGenerator gen = new SqlGenerator( factory ); gen.statement( walker.getAST() ); sql = gen.getSQL(); gen.getParseErrorHandler().throwQueryException(); @@ -71,8 +74,13 @@ public class BasicExecutor extends AbstractStatementExecutor { } public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException { - - coordinateSharedCacheCleanup( session ); + BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, persister ); + if ( session.isEventSource() ) { + ( (EventSource) session ).getActionQueue().addAction( action ); + } + else { + action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session ); + } PreparedStatement st = null; RowSelection selection = parameters.getRowSelection(); @@ -101,16 +109,7 @@ public class BasicExecutor extends AbstractStatementExecutor { } } catch( SQLException sqle ) { - throw getFactory().getSQLExceptionHelper().convert( - sqle, - "could not execute update query", - sql - ); + throw factory.getSQLExceptionHelper().convert( sqle, "could not execute update query", sql ); } } - - @Override - protected Queryable[] getAffectedQueryables() { - return new Queryable[] { persister }; - } } diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java index 5c2cb36ed6..b0b5a75e22 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java +++ 
b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableDeleteExecutor.java @@ -1,10 +1,10 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * - * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as + * Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Middleware LLC. + * distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU @@ -20,147 +20,46 @@ * Free Software Foundation, Inc. * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA - * */ package org.hibernate.hql.internal.ast.exec; -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.util.Iterator; - -import org.jboss.logging.Logger; - import org.hibernate.HibernateException; +import org.hibernate.action.internal.BulkOperationCleanupAction; import org.hibernate.engine.spi.QueryParameters; import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.event.spi.EventSource; import org.hibernate.hql.internal.ast.HqlSqlWalker; -import org.hibernate.hql.internal.ast.tree.DeleteStatement; -import org.hibernate.hql.internal.ast.tree.FromElement; -import org.hibernate.internal.CoreMessageLogger; -import org.hibernate.internal.util.StringHelper; -import org.hibernate.param.ParameterSpecification; -import org.hibernate.persister.entity.Queryable; -import org.hibernate.sql.Delete; +import org.hibernate.hql.spi.MultiTableBulkIdStrategy; /** * Implementation of MultiTableDeleteExecutor. 
* * @author Steve Ebersole */ -public class MultiTableDeleteExecutor extends AbstractStatementExecutor { - - private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, - MultiTableDeleteExecutor.class.getName()); - - private final Queryable persister; - private final String idInsertSelect; - private final String[] deletes; +public class MultiTableDeleteExecutor implements StatementExecutor { + private final MultiTableBulkIdStrategy.DeleteHandler deleteHandler; public MultiTableDeleteExecutor(HqlSqlWalker walker) { - super(walker, null); - - if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) { - throw new HibernateException( "cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" ); - } - - DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST(); - FromElement fromElement = deleteStatement.getFromClause().getFromElement(); - String bulkTargetAlias = fromElement.getTableAlias(); - this.persister = fromElement.getQueryable(); - - this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, deleteStatement.getWhereClause() ); - LOG.tracev( "Generated ID-INSERT-SELECT SQL (multi-table delete) : {0}", idInsertSelect ); - - String[] tableNames = persister.getConstraintOrderedTableNameClosure(); - String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure(); - String idSubselect = generateIdSubselect( persister ); - - deletes = new String[tableNames.length]; - for ( int i = tableNames.length - 1; i >= 0; i-- ) { - // TODO : an optimization here would be to consider cascade deletes and not gen those delete statements; - // the difficulty is the ordering of the tables here vs the cascade attributes on the persisters -> - // the table info gotten here should really be self-contained (i.e., a class representation - // defining all the needed attributes), then we could then get an array of those - final Delete delete = new Delete() - .setTableName( tableNames[i] ) - .setWhere( "(" + StringHelper.join( ", ", columnNames[i] ) + ") IN (" + idSubselect + ")" ); - if ( getFactory().getSettings().isCommentsEnabled() ) { - delete.setComment( "bulk delete" ); - } - - deletes[i] = delete.toStatementString(); - } + MultiTableBulkIdStrategy strategy = walker.getSessionFactoryHelper() + .getFactory() + .getSettings() + .getMultiTableBulkIdStrategy(); + this.deleteHandler = strategy.buildDeleteHandler( walker.getSessionFactoryHelper().getFactory(), walker ); } public String[] getSqlStatements() { - return deletes; + return deleteHandler.getSqlStatements(); } public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException { - coordinateSharedCacheCleanup( session ); - - createTemporaryTableIfNecessary( persister, session ); - - try { - // First, save off the pertinent ids, saving the number of pertinent ids for return - PreparedStatement ps = null; - int resultCount = 0; - try { - try { - ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false ); - Iterator paramSpecifications = getIdSelectParameterSpecifications().iterator(); - int pos = 1; - while ( paramSpecifications.hasNext() ) { - final ParameterSpecification paramSpec = ( ParameterSpecification ) paramSpecifications.next(); - pos += paramSpec.bind( ps, parameters, session, pos ); - } - resultCount = ps.executeUpdate(); - } - finally { - if ( ps != null ) { - ps.close(); - } - } - } - catch( 
SQLException e ) { - throw getFactory().getSQLExceptionHelper().convert( - e, - "could not insert/select ids for bulk delete", - idInsertSelect - ); - } - - // Start performing the deletes - for ( int i = 0; i < deletes.length; i++ ) { - try { - try { - ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( deletes[i], false ); - ps.executeUpdate(); - } - finally { - if ( ps != null ) { - ps.close(); - } - } - } - catch( SQLException e ) { - throw getFactory().getSQLExceptionHelper().convert( - e, - "error performing bulk delete", - deletes[i] - ); - } - } - - return resultCount; + BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, deleteHandler.getTargetedQueryable() ); + if ( session.isEventSource() ) { + ( (EventSource) session ).getActionQueue().addAction( action ); } - finally { - dropTemporaryTableIfNecessary( persister, session ); + else { + action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session ); } - } - @Override - protected Queryable[] getAffectedQueryables() { - return new Queryable[] { persister }; + return deleteHandler.execute( session, parameters ); } } diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java index b5168bb3cb..b78afe8f3f 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java +++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/MultiTableUpdateExecutor.java @@ -24,178 +24,44 @@ */ package org.hibernate.hql.internal.ast.exec; -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import org.jboss.logging.Logger; - import org.hibernate.HibernateException; +import org.hibernate.action.internal.BulkOperationCleanupAction; import org.hibernate.engine.spi.QueryParameters; import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.event.spi.EventSource; import org.hibernate.hql.internal.ast.HqlSqlWalker; -import org.hibernate.hql.internal.ast.tree.AssignmentSpecification; -import org.hibernate.hql.internal.ast.tree.FromElement; -import org.hibernate.hql.internal.ast.tree.UpdateStatement; -import org.hibernate.internal.CoreMessageLogger; -import org.hibernate.internal.util.StringHelper; -import org.hibernate.param.ParameterSpecification; -import org.hibernate.persister.entity.Queryable; -import org.hibernate.sql.Update; +import org.hibernate.hql.spi.MultiTableBulkIdStrategy; /** * Implementation of MultiTableUpdateExecutor. 
* * @author Steve Ebersole */ -public class MultiTableUpdateExecutor extends AbstractStatementExecutor { - - private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, - MultiTableUpdateExecutor.class.getName()); - - private final Queryable persister; - private final String idInsertSelect; - private final String[] updates; - private final ParameterSpecification[][] hqlParameters; +public class MultiTableUpdateExecutor implements StatementExecutor { + private final MultiTableBulkIdStrategy.UpdateHandler updateHandler; public MultiTableUpdateExecutor(HqlSqlWalker walker) { - super(walker, null); - - if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) { - throw new HibernateException( "cannot doAfterTransactionCompletion multi-table updates using dialect not supporting temp tables" ); - } - - UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST(); - FromElement fromElement = updateStatement.getFromClause().getFromElement(); - String bulkTargetAlias = fromElement.getTableAlias(); - this.persister = fromElement.getQueryable(); - - this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, updateStatement.getWhereClause() ); - LOG.tracev( "Generated ID-INSERT-SELECT SQL (multi-table update) : {0}", idInsertSelect ); - - String[] tableNames = persister.getConstraintOrderedTableNameClosure(); - String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure(); - - String idSubselect = generateIdSubselect( persister ); - List assignmentSpecifications = walker.getAssignmentSpecifications(); - - updates = new String[tableNames.length]; - hqlParameters = new ParameterSpecification[tableNames.length][]; - for ( int tableIndex = 0; tableIndex < tableNames.length; tableIndex++ ) { - boolean affected = false; - List parameterList = new ArrayList(); - Update update = new Update( getFactory().getDialect() ) - .setTableName( tableNames[tableIndex] ) - .setWhere( "(" + StringHelper.join( ", ", columnNames[tableIndex] ) + ") IN (" + idSubselect + ")" ); - if ( getFactory().getSettings().isCommentsEnabled() ) { - update.setComment( "bulk update" ); - } - final Iterator itr = assignmentSpecifications.iterator(); - while ( itr.hasNext() ) { - final AssignmentSpecification specification = ( AssignmentSpecification ) itr.next(); - if ( specification.affectsTable( tableNames[tableIndex] ) ) { - affected = true; - update.appendAssignmentFragment( specification.getSqlAssignmentFragment() ); - if ( specification.getParameters() != null ) { - for ( int paramIndex = 0; paramIndex < specification.getParameters().length; paramIndex++ ) { - parameterList.add( specification.getParameters()[paramIndex] ); - } - } - } - } - if ( affected ) { - updates[tableIndex] = update.toStatementString(); - hqlParameters[tableIndex] = ( ParameterSpecification[] ) parameterList.toArray( new ParameterSpecification[0] ); - } - } - } - - public Queryable getAffectedQueryable() { - return persister; + MultiTableBulkIdStrategy strategy = walker.getSessionFactoryHelper() + .getFactory() + .getSettings() + .getMultiTableBulkIdStrategy(); + this.updateHandler = strategy.buildUpdateHandler( walker.getSessionFactoryHelper().getFactory(), walker ); } public String[] getSqlStatements() { - return updates; + return updateHandler.getSqlStatements(); } public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException { - coordinateSharedCacheCleanup( session ); + BulkOperationCleanupAction action = new 
BulkOperationCleanupAction( session, updateHandler.getTargetedQueryable() ); - createTemporaryTableIfNecessary( persister, session ); - - try { - // First, save off the pertinent ids, as the return value - PreparedStatement ps = null; - int resultCount = 0; - try { - try { - ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false ); -// int parameterStart = getWalker().getNumberOfParametersInSetClause(); -// List allParams = getIdSelectParameterSpecifications(); -// Iterator whereParams = allParams.subList( parameterStart, allParams.size() ).iterator(); - Iterator whereParams = getIdSelectParameterSpecifications().iterator(); - int sum = 1; // jdbc params are 1-based - while ( whereParams.hasNext() ) { - sum += ( ( ParameterSpecification ) whereParams.next() ).bind( ps, parameters, session, sum ); - } - resultCount = ps.executeUpdate(); - } - finally { - if ( ps != null ) { - ps.close(); - } - } - } - catch( SQLException e ) { - throw getFactory().getSQLExceptionHelper().convert( - e, - "could not insert/select ids for bulk update", - idInsertSelect - ); - } - - // Start performing the updates - for ( int i = 0; i < updates.length; i++ ) { - if ( updates[i] == null ) { - continue; - } - try { - try { - ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( updates[i], false ); - if ( hqlParameters[i] != null ) { - int position = 1; // jdbc params are 1-based - for ( int x = 0; x < hqlParameters[i].length; x++ ) { - position += hqlParameters[i][x].bind( ps, parameters, session, position ); - } - } - ps.executeUpdate(); - } - finally { - if ( ps != null ) { - ps.close(); - } - } - } - catch( SQLException e ) { - throw getFactory().getSQLExceptionHelper().convert( - e, - "error performing bulk update", - updates[i] - ); - } - } - - return resultCount; + if ( session.isEventSource() ) { + ( (EventSource) session ).getActionQueue().addAction( action ); } - finally { - dropTemporaryTableIfNecessary( persister, session ); + else { + action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session ); } - } - @Override - protected Queryable[] getAffectedQueryables() { - return new Queryable[] { persister }; + return updateHandler.execute( session, parameters ); } } diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java index 152eedfb80..f83615af1b 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java +++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/BinaryLogicOperatorNode.java @@ -23,6 +23,8 @@ */ package org.hibernate.hql.internal.ast.tree; +import java.util.Arrays; + import antlr.SemanticException; import antlr.collections.AST; @@ -191,9 +193,7 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO protected static String[] extractMutationTexts(Node operand, int count) { if ( operand instanceof ParameterNode ) { String[] rtn = new String[count]; - for ( int i = 0; i < count; i++ ) { - rtn[i] = "?"; - } + Arrays.fill( rtn, "?" 
); return rtn; } else if ( operand.getType() == HqlSqlTokenTypes.VECTOR_EXPR ) { diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java index 7be415278f..6bb5e12421 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java +++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/tree/InLogicOperatorNode.java @@ -121,50 +121,70 @@ public class InLogicOperatorNode extends BinaryLogicOperatorNode implements Bina || ( !ParameterNode.class.isInstance( getLeftHandOperand() ) ) ? null : ( (ParameterNode) getLeftHandOperand() ) .getHqlParameterSpecification(); - /** - * only one element in "in" cluster, e.g. - * where (a,b) in ( (1,2) ) this will be mutated to - * where a=1 and b=2 - */ + + final boolean negated = getType() == HqlSqlTokenTypes.NOT_IN; + if ( rhsNode != null && rhsNode.getNextSibling() == null ) { - String[] rhsElementTexts = extractMutationTexts( rhsNode, - rhsColumnSpan ); - setType( HqlSqlTokenTypes.AND ); - setText( "AND" ); - ParameterSpecification rhsEmbeddedCompositeParameterSpecification = rhsNode == null - || ( !ParameterNode.class.isInstance( rhsNode ) ) ? null - : ( (ParameterNode) rhsNode ) - .getHqlParameterSpecification(); - translate( lhsColumnSpan, HqlSqlTokenTypes.EQ, "=", lhsElementTexts, + /** + * only one element in the vector grouping. + * where (a,b) in ( (1,2) ) this will be mutated to + * where a=1 and b=2 + */ + String[] rhsElementTexts = extractMutationTexts( rhsNode, rhsColumnSpan ); + setType( negated ? HqlTokenTypes.OR : HqlSqlTokenTypes.AND ); + setText( negated ? "or" : "and" ); + ParameterSpecification rhsEmbeddedCompositeParameterSpecification = + rhsNode == null || ( !ParameterNode.class.isInstance( rhsNode ) ) + ? null + : ( (ParameterNode) rhsNode ).getHqlParameterSpecification(); + translate( + lhsColumnSpan, + negated ? HqlSqlTokenTypes.NE : HqlSqlTokenTypes.EQ, + negated ? "<>" : "=", + lhsElementTexts, rhsElementTexts, lhsEmbeddedCompositeParameterSpecification, - rhsEmbeddedCompositeParameterSpecification, this ); - } else { + rhsEmbeddedCompositeParameterSpecification, + this + ); + } + else { List andElementsNodeList = new ArrayList(); while ( rhsNode != null ) { - String[] rhsElementTexts = extractMutationTexts( rhsNode, - rhsColumnSpan ); - AST and = getASTFactory().create( HqlSqlTokenTypes.AND, "AND" ); - ParameterSpecification rhsEmbeddedCompositeParameterSpecification = rhsNode == null - || ( !ParameterNode.class.isInstance( rhsNode ) ) ? null - : ( (ParameterNode) rhsNode ) - .getHqlParameterSpecification(); - translate( lhsColumnSpan, HqlSqlTokenTypes.EQ, "=", - lhsElementTexts, rhsElementTexts, + String[] rhsElementTexts = extractMutationTexts( rhsNode, rhsColumnSpan ); + AST group = getASTFactory().create( + negated ? HqlSqlTokenTypes.OR : HqlSqlTokenTypes.AND, + negated ? "or" : "and" + ); + ParameterSpecification rhsEmbeddedCompositeParameterSpecification = + rhsNode == null || ( !ParameterNode.class.isInstance( rhsNode ) ) + ? null + : ( (ParameterNode) rhsNode ).getHqlParameterSpecification(); + translate( + lhsColumnSpan, + negated ? HqlSqlTokenTypes.NE : HqlSqlTokenTypes.EQ, + negated ? 
"<>" : "=", + lhsElementTexts, + rhsElementTexts, lhsEmbeddedCompositeParameterSpecification, - rhsEmbeddedCompositeParameterSpecification, and ); - andElementsNodeList.add( and ); + rhsEmbeddedCompositeParameterSpecification, + group + ); + andElementsNodeList.add( group ); rhsNode = (Node) rhsNode.getNextSibling(); } - setType( HqlSqlTokenTypes.OR ); - setText( "OR" ); + setType( negated ? HqlSqlTokenTypes.AND : HqlSqlTokenTypes.OR ); + setText( negated ? "and" : "or" ); AST curNode = this; for ( int i = andElementsNodeList.size() - 1; i > 1; i-- ) { - AST or = getASTFactory().create( HqlSqlTokenTypes.OR, "OR" ); - curNode.setFirstChild( or ); - curNode = or; + AST group = getASTFactory().create( + negated ? HqlSqlTokenTypes.AND : HqlSqlTokenTypes.OR, + negated ? "and" : "or" + ); + curNode.setFirstChild( group ); + curNode = group; AST and = (AST) andElementsNodeList.get( i ); - or.setNextSibling( and ); + group.setNextSibling( and ); } AST node0 = (AST) andElementsNodeList.get( 0 ); AST node1 = (AST) andElementsNodeList.get( 1 ); diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java index 6b56e102bb..22c57349b4 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/util/SessionFactoryHelper.java @@ -361,7 +361,7 @@ public class SessionFactoryHelper { * @return The sql function, or null if not found. */ public SQLFunction findSQLFunction(String functionName) { - return sfi.getSqlFunctionRegistry().findSQLFunction( functionName.toLowerCase() ); + return sfi.getSqlFunctionRegistry().findSQLFunction( functionName ); } /** diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/AbstractTableBasedBulkIdHandler.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/AbstractTableBasedBulkIdHandler.java new file mode 100644 index 0000000000..11d2793306 --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/AbstractTableBasedBulkIdHandler.java @@ -0,0 +1,184 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.hql.spi; + +import java.sql.SQLException; +import java.util.Collections; +import java.util.List; + +import antlr.RecognitionException; +import antlr.collections.AST; + +import org.hibernate.HibernateException; +import org.hibernate.JDBCException; +import org.hibernate.engine.jdbc.spi.JdbcServices; +import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.hql.internal.ast.HqlSqlWalker; +import org.hibernate.hql.internal.ast.SqlGenerator; +import org.hibernate.internal.util.StringHelper; +import org.hibernate.mapping.Table; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.InsertSelect; +import org.hibernate.sql.Select; +import org.hibernate.sql.SelectValues; + +/** + * @author Steve Ebersole + */ +public class AbstractTableBasedBulkIdHandler { + private final SessionFactoryImplementor sessionFactory; + private final HqlSqlWalker walker; + + private final String catalog; + private final String schema; + + public AbstractTableBasedBulkIdHandler( + SessionFactoryImplementor sessionFactory, + HqlSqlWalker walker, + String catalog, + String schema) { + this.sessionFactory = sessionFactory; + this.walker = walker; + this.catalog = catalog; + this.schema = schema; + } + + protected SessionFactoryImplementor factory() { + return sessionFactory; + } + + protected HqlSqlWalker walker() { + return walker; + } + + protected JDBCException convert(SQLException e, String message, String sql) { + throw factory().getSQLExceptionHelper().convert( e, message, sql ); + } + + protected static class ProcessedWhereClause { + public static final ProcessedWhereClause NO_WHERE_CLAUSE = new ProcessedWhereClause(); + + private final String userWhereClauseFragment; + private final List idSelectParameterSpecifications; + + private ProcessedWhereClause() { + this( "", Collections.emptyList() ); + } + + public ProcessedWhereClause(String userWhereClauseFragment, List idSelectParameterSpecifications) { + this.userWhereClauseFragment = userWhereClauseFragment; + this.idSelectParameterSpecifications = idSelectParameterSpecifications; + } + + public String getUserWhereClauseFragment() { + return userWhereClauseFragment; + } + + public List getIdSelectParameterSpecifications() { + return idSelectParameterSpecifications; + } + } + + @SuppressWarnings("unchecked") + protected ProcessedWhereClause processWhereClause(AST whereClause) { + if ( whereClause.getNumberOfChildren() != 0 ) { + // If a where clause was specified in the update/delete query, use it to limit the + // returned ids here... 
+ try { + SqlGenerator sqlGenerator = new SqlGenerator( sessionFactory ); + sqlGenerator.whereClause( whereClause ); + String userWhereClause = sqlGenerator.getSQL().substring( 7 ); // strip the " where " + List idSelectParameterSpecifications = sqlGenerator.getCollectedParameters(); + + return new ProcessedWhereClause( userWhereClause, idSelectParameterSpecifications ); + } + catch ( RecognitionException e ) { + throw new HibernateException( "Unable to generate id select for DML operation", e ); + } + } + else { + return ProcessedWhereClause.NO_WHERE_CLAUSE; + } + } + + protected String generateIdInsertSelect(Queryable persister, String tableAlias, ProcessedWhereClause whereClause) { + Select select = new Select( sessionFactory.getDialect() ); + SelectValues selectClause = new SelectValues( sessionFactory.getDialect() ) + .addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() ); + addAnyExtraIdSelectValues( selectClause ); + select.setSelectClause( selectClause.render() ); + + String rootTableName = persister.getTableName(); + String fromJoinFragment = persister.fromJoinFragment( tableAlias, true, false ); + String whereJoinFragment = persister.whereJoinFragment( tableAlias, true, false ); + + select.setFromClause( rootTableName + ' ' + tableAlias + fromJoinFragment ); + + if ( whereJoinFragment == null ) { + whereJoinFragment = ""; + } + else { + whereJoinFragment = whereJoinFragment.trim(); + if ( whereJoinFragment.startsWith( "and" ) ) { + whereJoinFragment = whereJoinFragment.substring( 4 ); + } + } + + if ( whereClause.getUserWhereClauseFragment().length() > 0 ) { + if ( whereJoinFragment.length() > 0 ) { + whereJoinFragment += " and "; + } + } + select.setWhereClause( whereJoinFragment + whereClause.getUserWhereClauseFragment() ); + + InsertSelect insert = new InsertSelect( sessionFactory.getDialect() ); + if ( sessionFactory.getSettings().isCommentsEnabled() ) { + insert.setComment( "insert-select for " + persister.getEntityName() + " ids" ); + } + insert.setTableName( determineIdTableName( persister ) ); + insert.setSelect( select ); + return insert.toStatementString(); + } + + protected void addAnyExtraIdSelectValues(SelectValues selectClause) { + } + + protected String determineIdTableName(Queryable persister) { + // todo : use the identifier/name qualifier service once we pull that over to master + return Table.qualify( catalog, schema, persister.getTemporaryIdTableName() ); + } + + protected String generateIdSubselect(Queryable persister) { + return "select " + StringHelper.join( ", ", persister.getIdentifierColumnNames() ) + + " from " + determineIdTableName( persister ); + } + + protected void prepareForUse(Queryable persister, SessionImplementor session) { + } + + protected void releaseFromUse(Queryable persister, SessionImplementor session) { + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/MultiTableBulkIdStrategy.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/MultiTableBulkIdStrategy.java new file mode 100644 index 0000000000..30962d7171 --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/MultiTableBulkIdStrategy.java @@ -0,0 +1,105 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. 
+ * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.hql.spi; + +import java.util.Map; + +import org.hibernate.cfg.Mappings; +import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess; +import org.hibernate.engine.jdbc.spi.JdbcServices; +import org.hibernate.engine.spi.Mapping; +import org.hibernate.engine.spi.QueryParameters; +import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.hql.internal.ast.HqlSqlWalker; +import org.hibernate.persister.entity.Queryable; + +/** + * Generalized strategy contract for handling multi-table bulk HQL operations. + * + * @author Steve Ebersole + */ +public interface MultiTableBulkIdStrategy { + /** + * Prepare the strategy. Called as the SessionFactory is being built. Intended patterns here include:

<ul>
+ *     <li>Adding tables to the passed Mappings, to be picked up by "schema management tools"</li>
+ *     <li>Manually creating the tables immediately through the passed JDBC Connection access</li>
+ * </ul>
+ * + * @param jdbcServices The JdbcService object + * @param connectionAccess Access to the JDBC Connection + * @param mappings The Hibernate Mappings object, for access to O/RM mapping information + * @param mapping The Hibernate Mapping contract, mainly for use in DDL generation + * @param settings Configuration settings + */ + public void prepare(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess, Mappings mappings, Mapping mapping, Map settings); + + /** + * Release the strategy. Called as the SessionFactory is being shut down. + * + * @param jdbcServices The JdbcService object + * @param connectionAccess Access to the JDBC Connection + */ + public void release(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess); + + /** + * Handler for dealing with multi-table HQL bulk update statements. + */ + public static interface UpdateHandler { + public Queryable getTargetedQueryable(); + public String[] getSqlStatements(); + + public int execute(SessionImplementor session, QueryParameters queryParameters); + } + + /** + * Build a handler capable of handling the bulk update indicated by the given walker. + * + * @param factory The SessionFactory + * @param walker The AST walker, representing the update query + * + * @return The handler + */ + public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker); + + /** + * Handler for dealing with multi-table HQL bulk delete statements. + */ + public static interface DeleteHandler { + public Queryable getTargetedQueryable(); + public String[] getSqlStatements(); + + public int execute(SessionImplementor session, QueryParameters queryParameters); + } + + /** + * Build a handler capable of handling the bulk delete indicated by the given walker. + * + * @param factory The SessionFactory + * @param walker The AST walker, representing the delete query + * + * @return The handler + */ + public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker); +} diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/PersistentTableBulkIdStrategy.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/PersistentTableBulkIdStrategy.java new file mode 100644 index 0000000000..b328f1136d --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/PersistentTableBulkIdStrategy.java @@ -0,0 +1,322 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.hql.spi; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.hibernate.HibernateException; +import org.hibernate.JDBCException; +import org.hibernate.cfg.AvailableSettings; +import org.hibernate.cfg.Mappings; +import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess; +import org.hibernate.engine.jdbc.spi.JdbcServices; +import org.hibernate.engine.spi.Mapping; +import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.hql.internal.ast.HqlSqlWalker; +import org.hibernate.internal.AbstractSessionImpl; +import org.hibernate.internal.CoreMessageLogger; +import org.hibernate.internal.util.config.ConfigurationHelper; +import org.hibernate.mapping.Column; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.mapping.Table; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.SelectValues; +import org.hibernate.type.UUIDCharType; +import org.jboss.logging.Logger; + +/** + * @author Steve Ebersole + */ +public class PersistentTableBulkIdStrategy implements MultiTableBulkIdStrategy { + private static final CoreMessageLogger log = Logger.getMessageLogger( + CoreMessageLogger.class, + PersistentTableBulkIdStrategy.class.getName() + ); + + public static final String SHORT_NAME = "persistent"; + + public static final String CLEAN_UP_ID_TABLES = "hibernate.hql.bulk_id_strategy.persistent.clean_up"; + public static final String SCHEMA = "hibernate.hql.bulk_id_strategy.persistent.schema"; + public static final String CATALOG = "hibernate.hql.bulk_id_strategy.persistent.catalog"; + + private String catalog; + private String schema; + private boolean cleanUpTables; + private List tableCleanUpDdl; + + @Override + public void prepare( + JdbcServices jdbcServices, + JdbcConnectionAccess connectionAccess, + Mappings mappings, + Mapping mapping, + Map settings) { + this.catalog = ConfigurationHelper.getString( + CATALOG, + settings, + ConfigurationHelper.getString( AvailableSettings.DEFAULT_CATALOG, settings ) + ); + this.schema = ConfigurationHelper.getString( + SCHEMA, + settings, + ConfigurationHelper.getString( AvailableSettings.DEFAULT_SCHEMA, settings ) + ); + this.cleanUpTables = ConfigurationHelper.getBoolean( CLEAN_UP_ID_TABLES, settings, false ); + + final Iterator entityMappings = mappings.iterateClasses(); + final List idTableDefinitions = new ArrayList
(); + while ( entityMappings.hasNext() ) { + final PersistentClass entityMapping = entityMappings.next(); + final Table idTableDefinition = generateIdTableDefinition( entityMapping ); + idTableDefinitions.add( idTableDefinition ); + } + exportTableDefinitions( idTableDefinitions, jdbcServices, connectionAccess, mappings, mapping ); + } + + protected Table generateIdTableDefinition(PersistentClass entityMapping) { + Table idTable = new Table( entityMapping.getTemporaryIdTableName() ); + if ( catalog != null ) { + idTable.setCatalog( catalog ); + } + if ( schema != null ) { + idTable.setSchema( schema ); + } + Iterator itr = entityMapping.getTable().getPrimaryKey().getColumnIterator(); + while( itr.hasNext() ) { + Column column = (Column) itr.next(); + idTable.addColumn( column.clone() ); + } + Column sessionIdColumn = new Column( "hib_sess_id" ); + sessionIdColumn.setSqlType( "CHAR(36)" ); + sessionIdColumn.setComment( "Used to hold the Hibernate Session identifier" ); + idTable.addColumn( sessionIdColumn ); + + idTable.setComment( "Used to hold id values for the " + entityMapping.getEntityName() + " class" ); + return idTable; + } + + protected void exportTableDefinitions( + List
idTableDefinitions, + JdbcServices jdbcServices, + JdbcConnectionAccess connectionAccess, + Mappings mappings, + Mapping mapping) { + try { + Connection connection; + try { + connection = connectionAccess.obtainConnection(); + } + catch (UnsupportedOperationException e) { + // assume this comes from org.hibernate.engine.jdbc.connections.internal.UserSuppliedConnectionProviderImpl + log.debug( "Unable to obtain JDBC connection; assuming ID tables already exist or wont be needed" ); + return; + } + + try { + Statement statement = connection.createStatement(); + + for ( Table idTableDefinition : idTableDefinitions ) { + if ( cleanUpTables ) { + if ( tableCleanUpDdl == null ) { + tableCleanUpDdl = new ArrayList(); + } + tableCleanUpDdl.add( idTableDefinition.sqlDropString( jdbcServices.getDialect(), null, null ) ); + } + try { + final String sql = idTableDefinition.sqlCreateString( jdbcServices.getDialect(), mapping, null, null ); + jdbcServices.getSqlStatementLogger().logStatement( sql ); + statement.execute( sql ); + } + catch (SQLException e) { + log.debugf( "Error attempting to export id-table [%s] : %s", idTableDefinition.getName(), e.getMessage() ); + } + } + + statement.close(); + } + catch (SQLException e) { + log.error( "Unable to use JDBC Connection to create Statement", e ); + } + finally { + try { + connectionAccess.releaseConnection( connection ); + } + catch (SQLException ignore) { + } + } + } + catch (SQLException e) { + log.error( "Unable obtain JDBC Connection", e ); + } + } + + @Override + public void release(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess) { + if ( ! cleanUpTables || tableCleanUpDdl == null ) { + return; + } + + try { + Connection connection = connectionAccess.obtainConnection(); + + try { + Statement statement = connection.createStatement(); + + for ( String cleanupDdl : tableCleanUpDdl ) { + try { + jdbcServices.getSqlStatementLogger().logStatement( cleanupDdl ); + statement.execute( cleanupDdl ); + } + catch (SQLException e) { + log.debugf( "Error attempting to cleanup id-table : [%s]", e.getMessage() ); + } + } + + statement.close(); + } + catch (SQLException e) { + log.error( "Unable to use JDBC Connection to create Statement", e ); + } + finally { + try { + connectionAccess.releaseConnection( connection ); + } + catch (SQLException ignore) { + } + } + } + catch (SQLException e) { + log.error( "Unable obtain JDBC Connection", e ); + } + } + + @Override + public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) { + return new TableBasedUpdateHandlerImpl( factory, walker, catalog, schema ) { + @Override + protected void addAnyExtraIdSelectValues(SelectValues selectClause) { + selectClause.addParameter( Types.CHAR, 36 ); + } + + @Override + protected String generateIdSubselect(Queryable persister) { + return super.generateIdSubselect( persister ) + " where hib_sess_id=?"; + } + + @Override + protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException { + bindSessionIdentifier( ps, session, pos ); + return 1; + } + + @Override + protected void handleAddedParametersOnUpdate(PreparedStatement ps, SessionImplementor session, int position) throws SQLException { + bindSessionIdentifier( ps, session, position ); + } + + @Override + protected void releaseFromUse(Queryable persister, SessionImplementor session) { + // clean up our id-table rows + cleanUpRows( determineIdTableName( persister ), session ); + } + }; + } + + private void 
bindSessionIdentifier(PreparedStatement ps, SessionImplementor session, int position) throws SQLException { + if ( ! AbstractSessionImpl.class.isInstance( session ) ) { + throw new HibernateException( "Only available on SessionImpl instances" ); + } + UUIDCharType.INSTANCE.set( ps, ( (AbstractSessionImpl) session ).getSessionIdentifier(), position, session ); + } + + private void cleanUpRows(String tableName, SessionImplementor session) { + final String sql = "delete from " + tableName + " where hib_sess_id=?"; + try { + PreparedStatement ps = null; + try { + ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false ); + bindSessionIdentifier( ps, session, 1 ); + ps.executeUpdate(); + } + finally { + if ( ps != null ) { + try { + ps.close(); + } + catch( Throwable ignore ) { + // ignore + } + } + } + } + catch (SQLException e) { + throw convert( session.getFactory(), e, "Unable to clean up id table [" + tableName + "]", sql ); + } + } + + protected JDBCException convert(SessionFactoryImplementor factory, SQLException e, String message, String sql) { + throw factory.getSQLExceptionHelper().convert( e, message, sql ); + } + + @Override + public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) { + return new TableBasedDeleteHandlerImpl( factory, walker, catalog, schema ) { + @Override + protected void addAnyExtraIdSelectValues(SelectValues selectClause) { + selectClause.addParameter( Types.CHAR, 36 ); + } + + @Override + protected String generateIdSubselect(Queryable persister) { + return super.generateIdSubselect( persister ) + " where hib_sess_id=?"; + } + + @Override + protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException { + bindSessionIdentifier( ps, session, pos ); + return 1; + } + + @Override + protected void handleAddedParametersOnDelete(PreparedStatement ps, SessionImplementor session) throws SQLException { + bindSessionIdentifier( ps, session, 1 ); + } + + @Override + protected void releaseFromUse(Queryable persister, SessionImplementor session) { + // clean up our id-table rows + cleanUpRows( determineIdTableName( persister ), session ); + } + }; + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedDeleteHandlerImpl.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedDeleteHandlerImpl.java new file mode 100644 index 0000000000..8b51d537b3 --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedDeleteHandlerImpl.java @@ -0,0 +1,172 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.hql.spi; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.List; + +import org.jboss.logging.Logger; + +import org.hibernate.engine.spi.QueryParameters; +import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.hql.internal.ast.HqlSqlWalker; +import org.hibernate.hql.internal.ast.tree.DeleteStatement; +import org.hibernate.hql.internal.ast.tree.FromElement; +import org.hibernate.internal.util.StringHelper; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.Delete; + +/** +* @author Steve Ebersole +*/ +public class TableBasedDeleteHandlerImpl + extends AbstractTableBasedBulkIdHandler + implements MultiTableBulkIdStrategy.DeleteHandler { + private static final Logger log = Logger.getLogger( TableBasedDeleteHandlerImpl.class ); + + private final Queryable targetedPersister; + + private final String idInsertSelect; + private final List idSelectParameterSpecifications; + private final String[] deletes; + + public TableBasedDeleteHandlerImpl(SessionFactoryImplementor factory, HqlSqlWalker walker) { + this( factory, walker, null, null ); + } + + public TableBasedDeleteHandlerImpl( + SessionFactoryImplementor factory, + HqlSqlWalker walker, + String catalog, + String schema) { + super( factory, walker, catalog, schema ); + + DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST(); + FromElement fromElement = deleteStatement.getFromClause().getFromElement(); + + this.targetedPersister = fromElement.getQueryable(); + final String bulkTargetAlias = fromElement.getTableAlias(); + + final ProcessedWhereClause processedWhereClause = processWhereClause( deleteStatement.getWhereClause() ); + this.idSelectParameterSpecifications = processedWhereClause.getIdSelectParameterSpecifications(); + this.idInsertSelect = generateIdInsertSelect( targetedPersister, bulkTargetAlias, processedWhereClause ); + log.tracev( "Generated ID-INSERT-SELECT SQL (multi-table delete) : {0}", idInsertSelect ); + + String[] tableNames = targetedPersister.getConstraintOrderedTableNameClosure(); + String[][] columnNames = targetedPersister.getContraintOrderedTableKeyColumnClosure(); + String idSubselect = generateIdSubselect( targetedPersister ); + + deletes = new String[tableNames.length]; + for ( int i = tableNames.length - 1; i >= 0; i-- ) { + // TODO : an optimization here would be to consider cascade deletes and not gen those delete statements; + // the difficulty is the ordering of the tables here vs the cascade attributes on the persisters -> + // the table info gotten here should really be self-contained (i.e., a class representation + // defining all the needed attributes), then we could then get an array of those + final Delete delete = new Delete() + .setTableName( tableNames[i] ) + .setWhere( "(" + StringHelper.join( ", ", columnNames[i] ) + ") IN (" + idSubselect + ")" ); + if ( factory().getSettings().isCommentsEnabled() ) { + delete.setComment( "bulk delete" ); + } + + deletes[i] = delete.toStatementString(); + } + } + + @Override + public Queryable getTargetedQueryable() { + return targetedPersister; + } + + @Override + public String[] getSqlStatements() { + 
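+ // the per-table DELETE statements built in the constructor: one per constraint-ordered
+ // table, each restricted to the ids captured in the id table, e.g. (illustrative
+ // table/column names only):
+ //     delete from PERSON_DETAIL where (person_id) IN (select person_id from HT_PERSON)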
return deletes; + } + + @Override + public int execute(SessionImplementor session, QueryParameters queryParameters) { + prepareForUse( targetedPersister, session ); + try { + PreparedStatement ps = null; + int resultCount = 0; + try { + try { + ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false ); + int pos = 1; + pos += handlePrependedParametersOnIdSelection( ps, session, pos ); + for ( ParameterSpecification parameterSpecification : idSelectParameterSpecifications ) { + pos += parameterSpecification.bind( ps, queryParameters, session, pos ); + } + resultCount = ps.executeUpdate(); + } + finally { + if ( ps != null ) { + ps.close(); + } + } + } + catch( SQLException e ) { + throw convert( e, "could not insert/select ids for bulk delete", idInsertSelect ); + } + + // Start performing the deletes + for ( String delete : deletes ) { + try { + try { + ps = session.getTransactionCoordinator() + .getJdbcCoordinator() + .getStatementPreparer() + .prepareStatement( delete, false ); + handleAddedParametersOnDelete( ps, session ); + ps.executeUpdate(); + } + finally { + if ( ps != null ) { + ps.close(); + } + } + } + catch (SQLException e) { + throw convert( e, "error performing bulk delete", delete ); + } + } + + return resultCount; + + } + finally { + releaseFromUse( targetedPersister, session ); + } + } + + protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException { + return 0; + } + + protected void handleAddedParametersOnDelete(PreparedStatement ps, SessionImplementor session) throws SQLException { + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedUpdateHandlerImpl.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedUpdateHandlerImpl.java new file mode 100644 index 0000000000..5b2a990213 --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/TableBasedUpdateHandlerImpl.java @@ -0,0 +1,198 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.hql.spi; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.jboss.logging.Logger; + +import org.hibernate.engine.spi.QueryParameters; +import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.hql.internal.ast.HqlSqlWalker; +import org.hibernate.hql.internal.ast.tree.AssignmentSpecification; +import org.hibernate.hql.internal.ast.tree.FromElement; +import org.hibernate.hql.internal.ast.tree.UpdateStatement; +import org.hibernate.internal.util.StringHelper; +import org.hibernate.param.ParameterSpecification; +import org.hibernate.persister.entity.Queryable; +import org.hibernate.sql.Update; + +/** +* @author Steve Ebersole +*/ +public class TableBasedUpdateHandlerImpl + extends AbstractTableBasedBulkIdHandler + implements MultiTableBulkIdStrategy.UpdateHandler { + + private static final Logger log = Logger.getLogger( TableBasedUpdateHandlerImpl.class ); + + private final Queryable targetedPersister; + + private final String idInsertSelect; + private final List idSelectParameterSpecifications; + + private final String[] updates; + private final ParameterSpecification[][] assignmentParameterSpecifications; + + @SuppressWarnings("unchecked") + public TableBasedUpdateHandlerImpl(SessionFactoryImplementor factory, HqlSqlWalker walker) { + this( factory, walker, null, null ); + } + + public TableBasedUpdateHandlerImpl( + SessionFactoryImplementor factory, + HqlSqlWalker walker, + String catalog, + String schema) { + super( factory, walker, catalog, schema ); + + UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST(); + FromElement fromElement = updateStatement.getFromClause().getFromElement(); + + this.targetedPersister = fromElement.getQueryable(); + final String bulkTargetAlias = fromElement.getTableAlias(); + + final ProcessedWhereClause processedWhereClause = processWhereClause( updateStatement.getWhereClause() ); + this.idSelectParameterSpecifications = processedWhereClause.getIdSelectParameterSpecifications(); + this.idInsertSelect = generateIdInsertSelect( targetedPersister, bulkTargetAlias, processedWhereClause ); + log.tracev( "Generated ID-INSERT-SELECT SQL (multi-table update) : {0}", idInsertSelect ); + + String[] tableNames = targetedPersister.getConstraintOrderedTableNameClosure(); + String[][] columnNames = targetedPersister.getContraintOrderedTableKeyColumnClosure(); + String idSubselect = generateIdSubselect( targetedPersister ); + + updates = new String[tableNames.length]; + assignmentParameterSpecifications = new ParameterSpecification[tableNames.length][]; + for ( int tableIndex = 0; tableIndex < tableNames.length; tableIndex++ ) { + boolean affected = false; + final List parameterList = new ArrayList(); + final Update update = new Update( factory().getDialect() ) + .setTableName( tableNames[tableIndex] ) + .setWhere( "(" + StringHelper.join( ", ", columnNames[tableIndex] ) + ") IN (" + idSubselect + ")" ); + if ( factory().getSettings().isCommentsEnabled() ) { + update.setComment( "bulk update" ); + } + final List assignmentSpecifications = walker.getAssignmentSpecifications(); + for ( AssignmentSpecification assignmentSpecification : assignmentSpecifications ) { + if ( assignmentSpecification.affectsTable( tableNames[tableIndex] ) ) { + affected = true; + update.appendAssignmentFragment( 
assignmentSpecification.getSqlAssignmentFragment() ); + if ( assignmentSpecification.getParameters() != null ) { + for ( int paramIndex = 0; paramIndex < assignmentSpecification.getParameters().length; paramIndex++ ) { + parameterList.add( assignmentSpecification.getParameters()[paramIndex] ); + } + } + } + } + if ( affected ) { + updates[tableIndex] = update.toStatementString(); + assignmentParameterSpecifications[tableIndex] = parameterList.toArray( new ParameterSpecification[parameterList.size()] ); + } + } + } + + @Override + public Queryable getTargetedQueryable() { + return targetedPersister; + } + + @Override + public String[] getSqlStatements() { + return updates; + } + + @Override + public int execute(SessionImplementor session, QueryParameters queryParameters) { + prepareForUse( targetedPersister, session ); + try { + // First, save off the pertinent ids, as the return value + PreparedStatement ps = null; + int resultCount = 0; + try { + try { + ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false ); + int sum = 1; + sum += handlePrependedParametersOnIdSelection( ps, session, sum ); + for ( ParameterSpecification parameterSpecification : idSelectParameterSpecifications ) { + sum += parameterSpecification.bind( ps, queryParameters, session, sum ); + } + resultCount = ps.executeUpdate(); + } + finally { + if ( ps != null ) { + ps.close(); + } + } + } + catch( SQLException e ) { + throw convert( e, "could not insert/select ids for bulk update", idInsertSelect ); + } + + // Start performing the updates + for ( int i = 0; i < updates.length; i++ ) { + if ( updates[i] == null ) { + continue; + } + try { + try { + ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( updates[i], false ); + if ( assignmentParameterSpecifications[i] != null ) { + int position = 1; // jdbc params are 1-based + for ( int x = 0; x < assignmentParameterSpecifications[i].length; x++ ) { + position += assignmentParameterSpecifications[i][x].bind( ps, queryParameters, session, position ); + } + handleAddedParametersOnUpdate( ps, session, position ); + } + ps.executeUpdate(); + } + finally { + if ( ps != null ) { + ps.close(); + } + } + } + catch( SQLException e ) { + throw convert( e, "error performing bulk update", updates[i] ); + } + } + + return resultCount; + } + finally { + releaseFromUse( targetedPersister, session ); + } + } + + protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException { + return 0; + } + + protected void handleAddedParametersOnUpdate(PreparedStatement ps, SessionImplementor session, int position) throws SQLException { + //To change body of created methods use File | Settings | File Templates. 
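+ // no-op hook by default; the "persistent" id-table strategy overrides this to bind the
+ // session identifier matching the "where hib_sess_id=?" restriction it appends to the
+ // id subselect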
+ } +} diff --git a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/AbstractStatementExecutor.java b/hibernate-core/src/main/java/org/hibernate/hql/spi/TemporaryTableBulkIdStrategy.java similarity index 50% rename from hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/AbstractStatementExecutor.java rename to hibernate-core/src/main/java/org/hibernate/hql/spi/TemporaryTableBulkIdStrategy.java index d745bac252..e3a57c0319 100644 --- a/hibernate-core/src/main/java/org/hibernate/hql/internal/ast/exec/AbstractStatementExecutor.java +++ b/hibernate-core/src/main/java/org/hibernate/hql/spi/TemporaryTableBulkIdStrategy.java @@ -1,7 +1,7 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * - * Copyright (c) 2010, Red Hat Inc. or third-party contributors as + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are * distributed under license by Red Hat Inc. @@ -21,122 +21,156 @@ * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ -package org.hibernate.hql.internal.ast.exec; +package org.hibernate.hql.spi; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLWarning; import java.sql.Statement; -import java.util.Collections; -import java.util.List; +import java.util.Map; -import antlr.RecognitionException; -import antlr.collections.AST; -import org.jboss.logging.Logger; - -import org.hibernate.HibernateException; -import org.hibernate.action.internal.BulkOperationCleanupAction; +import org.hibernate.cfg.Mappings; +import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess; import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.engine.jdbc.spi.SqlExceptionHelper; +import org.hibernate.engine.spi.Mapping; import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionImplementor; -import org.hibernate.event.spi.EventSource; import org.hibernate.hql.internal.ast.HqlSqlWalker; -import org.hibernate.hql.internal.ast.SqlGenerator; import org.hibernate.internal.CoreMessageLogger; -import org.hibernate.internal.util.StringHelper; import org.hibernate.jdbc.AbstractWork; import org.hibernate.persister.entity.Queryable; -import org.hibernate.sql.InsertSelect; -import org.hibernate.sql.Select; -import org.hibernate.sql.SelectFragment; +import org.jboss.logging.Logger; /** - * Implementation of AbstractStatementExecutor. 
- * * @author Steve Ebersole */ -public abstract class AbstractStatementExecutor implements StatementExecutor { +public class TemporaryTableBulkIdStrategy implements MultiTableBulkIdStrategy { + public static final TemporaryTableBulkIdStrategy INSTANCE = new TemporaryTableBulkIdStrategy(); - private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, - AbstractStatementExecutor.class.getName()); + public static final String SHORT_NAME = "temporary"; - private final HqlSqlWalker walker; - private List idSelectParameterSpecifications = Collections.EMPTY_LIST; + private static final CoreMessageLogger log = Logger.getMessageLogger( + CoreMessageLogger.class, + TemporaryTableBulkIdStrategy.class.getName() + ); - public AbstractStatementExecutor( HqlSqlWalker walker, - CoreMessageLogger log ) { - this.walker = walker; + @Override + public void prepare(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess, Mappings mappings, Mapping mapping, Map settings) { + // nothing to do } - protected HqlSqlWalker getWalker() { - return walker; + @Override + public void release(JdbcServices jdbcServices, JdbcConnectionAccess connectionAccess) { + // nothing to do } - protected SessionFactoryImplementor getFactory() { - return walker.getSessionFactoryHelper().getFactory(); + @Override + public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) { + return new TableBasedUpdateHandlerImpl( factory, walker ) { + @Override + protected void prepareForUse(Queryable persister, SessionImplementor session) { + createTempTable( persister, session ); + } + + @Override + protected void releaseFromUse(Queryable persister, SessionImplementor session) { + releaseTempTable( persister, session ); + } + }; } - protected List getIdSelectParameterSpecifications() { - return idSelectParameterSpecifications; + @Override + public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) { + return new TableBasedDeleteHandlerImpl( factory, walker ) { + @Override + protected void prepareForUse(Queryable persister, SessionImplementor session) { + createTempTable( persister, session ); + } + + @Override + protected void releaseFromUse(Queryable persister, SessionImplementor session) { + releaseTempTable( persister, session ); + } + }; } - protected abstract Queryable[] getAffectedQueryables(); - protected String generateIdInsertSelect(Queryable persister, String tableAlias, AST whereClause) { - Select select = new Select( getFactory().getDialect() ); - SelectFragment selectFragment = new SelectFragment() - .addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() ); - select.setSelectClause( selectFragment.toFragmentString().substring( 2 ) ); - - String rootTableName = persister.getTableName(); - String fromJoinFragment = persister.fromJoinFragment( tableAlias, true, false ); - String whereJoinFragment = persister.whereJoinFragment( tableAlias, true, false ); - - select.setFromClause( rootTableName + ' ' + tableAlias + fromJoinFragment ); - - if ( whereJoinFragment == null ) { - whereJoinFragment = ""; + protected void createTempTable(Queryable persister, SessionImplementor session) { + // Don't really know all the codes required to adequately decipher returned jdbc exceptions here. 
+ // simply allow the failure to be eaten and the subsequent insert-selects/deletes should fail + TemporaryTableCreationWork work = new TemporaryTableCreationWork( persister ); + if ( shouldIsolateTemporaryTableDDL( session ) ) { + session.getTransactionCoordinator() + .getTransaction() + .createIsolationDelegate() + .delegateWork( work, session.getFactory().getSettings().isDataDefinitionInTransactionSupported() ); } else { - whereJoinFragment = whereJoinFragment.trim(); - if ( whereJoinFragment.startsWith( "and" ) ) { - whereJoinFragment = whereJoinFragment.substring( 4 ); - } + final Connection connection = session.getTransactionCoordinator() + .getJdbcCoordinator() + .getLogicalConnection() + .getShareableConnectionProxy(); + work.execute( connection ); + session.getTransactionCoordinator() + .getJdbcCoordinator() + .getLogicalConnection() + .afterStatementExecution(); } - - String userWhereClause = ""; - if ( whereClause.getNumberOfChildren() != 0 ) { - // If a where clause was specified in the update/delete query, use it to limit the - // returned ids here... - try { - SqlGenerator sqlGenerator = new SqlGenerator( getFactory() ); - sqlGenerator.whereClause( whereClause ); - userWhereClause = sqlGenerator.getSQL().substring( 7 ); // strip the " where " - idSelectParameterSpecifications = sqlGenerator.getCollectedParameters(); - } - catch ( RecognitionException e ) { - throw new HibernateException( "Unable to generate id select for DML operation", e ); - } - if ( whereJoinFragment.length() > 0 ) { - whereJoinFragment += " and "; - } - } - - select.setWhereClause( whereJoinFragment + userWhereClause ); - - InsertSelect insert = new InsertSelect( getFactory().getDialect() ); - if ( getFactory().getSettings().isCommentsEnabled() ) { - insert.setComment( "insert-select for " + persister.getEntityName() + " ids" ); - } - insert.setTableName( persister.getTemporaryIdTableName() ); - insert.setSelect( select ); - return insert.toStatementString(); } - protected String generateIdSubselect(Queryable persister) { - return "select " + StringHelper.join( ", ", persister.getIdentifierColumnNames() ) + - " from " + persister.getTemporaryIdTableName(); + protected void releaseTempTable(Queryable persister, SessionImplementor session) { + if ( session.getFactory().getDialect().dropTemporaryTableAfterUse() ) { + TemporaryTableDropWork work = new TemporaryTableDropWork( persister, session ); + if ( shouldIsolateTemporaryTableDDL( session ) ) { + session.getTransactionCoordinator() + .getTransaction() + .createIsolationDelegate() + .delegateWork( work, session.getFactory().getSettings().isDataDefinitionInTransactionSupported() ); + } + else { + final Connection connection = session.getTransactionCoordinator() + .getJdbcCoordinator() + .getLogicalConnection() + .getShareableConnectionProxy(); + work.execute( connection ); + session.getTransactionCoordinator() + .getJdbcCoordinator() + .getLogicalConnection() + .afterStatementExecution(); + } + } + else { + // at the very least cleanup the data :) + PreparedStatement ps = null; + try { + final String sql = "delete from " + persister.getTemporaryIdTableName(); + ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false ); + ps.executeUpdate(); + } + catch( Throwable t ) { + log.unableToCleanupTemporaryIdTable(t); + } + finally { + if ( ps != null ) { + try { + ps.close(); + } + catch( Throwable ignore ) { + // ignore + } + } + } + } + } + + @SuppressWarnings({ "UnnecessaryUnboxing" }) + protected 
boolean shouldIsolateTemporaryTableDDL(SessionImplementor session) { + Boolean dialectVote = session.getFactory().getDialect().performTemporaryTableDDLInIsolation(); + if ( dialectVote != null ) { + return dialectVote.booleanValue(); + } + return session.getFactory().getSettings().isDataDefinitionImplicitCommit(); } private static class TemporaryTableCreationWork extends AbstractWork { @@ -168,46 +202,24 @@ public abstract class AbstractStatementExecutor implements StatementExecutor { } } catch( Exception e ) { - LOG.debug( "unable to create temporary id table [" + e.getMessage() + "]" ); + log.debug( "unable to create temporary id table [" + e.getMessage() + "]" ); } } } - protected void createTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) { - // Don't really know all the codes required to adequately decipher returned jdbc exceptions here. - // simply allow the failure to be eaten and the subsequent insert-selects/deletes should fail - TemporaryTableCreationWork work = new TemporaryTableCreationWork( persister ); - if ( shouldIsolateTemporaryTableDDL() ) { - session.getTransactionCoordinator() - .getTransaction() - .createIsolationDelegate() - .delegateWork( work, getFactory().getSettings().isDataDefinitionInTransactionSupported() ); - } - else { - final Connection connection = session.getTransactionCoordinator() - .getJdbcCoordinator() - .getLogicalConnection() - .getShareableConnectionProxy(); - work.execute( connection ); - session.getTransactionCoordinator() - .getJdbcCoordinator() - .getLogicalConnection() - .afterStatementExecution(); - } - } private static SqlExceptionHelper.WarningHandler CREATION_WARNING_HANDLER = new SqlExceptionHelper.WarningHandlerLoggingSupport() { public boolean doProcess() { - return LOG.isDebugEnabled(); + return log.isDebugEnabled(); } public void prepare(SQLWarning warning) { - LOG.warningsCreatingTempTable( warning ); + log.warningsCreatingTempTable( warning ); } @Override protected void logWarning(String description, String message) { - LOG.debug( description ); - LOG.debug( message ); + log.debug( description ); + log.debug( message ); } }; @@ -240,71 +252,9 @@ public abstract class AbstractStatementExecutor implements StatementExecutor { } } catch( Exception e ) { - LOG.warn( "unable to drop temporary id table after use [" + e.getMessage() + "]" ); + log.warn( "unable to drop temporary id table after use [" + e.getMessage() + "]" ); } } } - protected void dropTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) { - if ( getFactory().getDialect().dropTemporaryTableAfterUse() ) { - TemporaryTableDropWork work = new TemporaryTableDropWork( persister, session ); - if ( shouldIsolateTemporaryTableDDL() ) { - session.getTransactionCoordinator() - .getTransaction() - .createIsolationDelegate() - .delegateWork( work, getFactory().getSettings().isDataDefinitionInTransactionSupported() ); - } - else { - final Connection connection = session.getTransactionCoordinator() - .getJdbcCoordinator() - .getLogicalConnection() - .getShareableConnectionProxy(); - work.execute( connection ); - session.getTransactionCoordinator() - .getJdbcCoordinator() - .getLogicalConnection() - .afterStatementExecution(); - } - } - else { - // at the very least cleanup the data :) - PreparedStatement ps = null; - try { - final String sql = "delete from " + persister.getTemporaryIdTableName(); - ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false ); - 
ps.executeUpdate(); - } - catch( Throwable t ) { - LOG.unableToCleanupTemporaryIdTable(t); - } - finally { - if ( ps != null ) { - try { - ps.close(); - } - catch( Throwable ignore ) { - // ignore - } - } - } - } - } - - protected void coordinateSharedCacheCleanup(SessionImplementor session) { - BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getAffectedQueryables() ); - - if ( session.isEventSource() ) { - ( ( EventSource ) session ).getActionQueue().addAction( action ); - } - else { - action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session ); - } - } - - @SuppressWarnings({ "UnnecessaryUnboxing" }) - protected boolean shouldIsolateTemporaryTableDDL() { - Boolean dialectVote = getFactory().getDialect().performTemporaryTableDDLInIsolation(); - if (dialectVote != null) return dialectVote.booleanValue(); - return getFactory().getSettings().isDataDefinitionImplicitCommit(); - } } diff --git a/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java b/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java index 20029ac0fb..94890271ed 100755 --- a/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/AbstractSessionImpl.java @@ -27,6 +27,7 @@ import java.io.Serializable; import java.sql.Connection; import java.sql.SQLException; import java.util.List; +import java.util.UUID; import org.hibernate.HibernateException; import org.hibernate.MappingException; @@ -51,6 +52,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.engine.transaction.spi.TransactionContext; import org.hibernate.engine.transaction.spi.TransactionEnvironment; +import org.hibernate.id.uuid.StandardRandomStrategy; import org.hibernate.jdbc.WorkExecutor; import org.hibernate.jdbc.WorkExecutorVisitable; import org.hibernate.persister.entity.EntityPersister; @@ -317,6 +319,15 @@ public abstract class AbstractSessionImpl implements Serializable, SharedSession return jdbcConnectionAccess; } + private UUID sessionIdentifier; + + public UUID getSessionIdentifier() { + if ( sessionIdentifier == null ) { + sessionIdentifier = StandardRandomStrategy.INSTANCE.generateUUID( this ); + } + return sessionIdentifier; + } + private static class NonContextualJdbcConnectionAccess implements JdbcConnectionAccess, Serializable { private final ConnectionProvider connectionProvider; diff --git a/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java b/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java index de1a9f5d96..70dcaa9666 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java @@ -29,6 +29,7 @@ import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.sql.Connection; +import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -38,12 +39,11 @@ import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; + import javax.naming.Reference; import javax.naming.StringRefAddr; import javax.persistence.metamodel.Metamodel; -import org.jboss.logging.Logger; - import org.hibernate.AssertionFailure; import org.hibernate.Cache; import 
org.hibernate.ConnectionReleaseMode; @@ -53,6 +53,7 @@ import org.hibernate.EntityNameResolver; import org.hibernate.HibernateException; import org.hibernate.Interceptor; import org.hibernate.MappingException; +import org.hibernate.MultiTenancyStrategy; import org.hibernate.ObjectNotFoundException; import org.hibernate.QueryException; import org.hibernate.Session; @@ -63,6 +64,8 @@ import org.hibernate.StatelessSession; import org.hibernate.StatelessSessionBuilder; import org.hibernate.TypeHelper; import org.hibernate.boot.registry.StandardServiceRegistry; +import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; +import org.hibernate.boot.registry.classloading.spi.ClassLoadingException; import org.hibernate.boot.registry.selector.spi.StrategySelector; import org.hibernate.cache.internal.CacheDataDescriptionImpl; import org.hibernate.cache.spi.CollectionRegion; @@ -91,8 +94,13 @@ import org.hibernate.dialect.Dialect; import org.hibernate.dialect.function.SQLFunction; import org.hibernate.dialect.function.SQLFunctionRegistry; import org.hibernate.engine.ResultSetMappingDefinition; +import org.hibernate.engine.config.spi.ConfigurationService; +import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; +import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess; +import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider; import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.engine.jdbc.spi.SqlExceptionHelper; +import org.hibernate.engine.jndi.spi.JndiService; import org.hibernate.engine.profile.Association; import org.hibernate.engine.profile.Fetch; import org.hibernate.engine.profile.FetchProfile; @@ -107,6 +115,7 @@ import org.hibernate.engine.spi.SessionBuilderImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionOwner; import org.hibernate.engine.transaction.internal.TransactionCoordinatorImpl; +import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; import org.hibernate.engine.transaction.spi.TransactionEnvironment; import org.hibernate.exception.spi.SQLExceptionConverter; import org.hibernate.id.IdentifierGenerator; @@ -131,12 +140,6 @@ import org.hibernate.persister.entity.Queryable; import org.hibernate.persister.spi.PersisterFactory; import org.hibernate.proxy.EntityNotFoundDelegate; import org.hibernate.service.ServiceRegistry; -import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; -import org.hibernate.boot.registry.classloading.spi.ClassLoadingException; -import org.hibernate.engine.config.spi.ConfigurationService; -import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; -import org.hibernate.engine.jndi.spi.JndiService; -import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; import org.hibernate.service.spi.ServiceRegistryImplementor; import org.hibernate.service.spi.SessionFactoryServiceRegistry; import org.hibernate.service.spi.SessionFactoryServiceRegistryFactory; @@ -150,6 +153,7 @@ import org.hibernate.tuple.entity.EntityTuplizer; import org.hibernate.type.AssociationType; import org.hibernate.type.Type; import org.hibernate.type.TypeResolver; +import org.jboss.logging.Logger; /** @@ -531,6 +535,15 @@ public final class SessionFactoryImpl LOG.debug( "Instantiated session factory" ); + settings.getMultiTableBulkIdStrategy().prepare( + jdbcServices, + buildLocalConnectionAccess(), + cfg.createMappings(), + cfg.buildMapping(), + properties + ); + + if ( settings.isAutoCreateSchema() ) { 
new SchemaExport( serviceRegistry, cfg ) .setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) ) @@ -557,7 +570,7 @@ public final class SessionFactoryImpl String sep = ""; for ( Map.Entry entry : errors.entrySet() ) { LOG.namedQueryError( entry.getKey(), entry.getValue() ); - failingQueries.append( entry.getKey() ).append( sep ); + failingQueries.append( sep ).append( entry.getKey() ); sep = ", "; } throw new HibernateException( failingQueries.toString() ); @@ -616,6 +629,32 @@ public final class SessionFactoryImpl } } + private JdbcConnectionAccess buildLocalConnectionAccess() { + return new JdbcConnectionAccess() { + @Override + public Connection obtainConnection() throws SQLException { + return settings.getMultiTenancyStrategy() == MultiTenancyStrategy.NONE + ? serviceRegistry.getService( ConnectionProvider.class ).getConnection() + : serviceRegistry.getService( MultiTenantConnectionProvider.class ).getAnyConnection(); + } + + @Override + public void releaseConnection(Connection connection) throws SQLException { + if ( settings.getMultiTenancyStrategy() == MultiTenancyStrategy.NONE ) { + serviceRegistry.getService( ConnectionProvider.class ).closeConnection( connection ); + } + else { + serviceRegistry.getService( MultiTenantConnectionProvider.class ).releaseAnyConnection( connection ); + } + } + + @Override + public boolean supportsAggressiveRelease() { + return false; + } + }; + } + protected JpaMetaModelPopulationSetting determineJpaMetaModelPopulationSetting(Configuration cfg) { final String setting = cfg.getProperties().getProperty( AvailableSettings.JPA_METAMODEL_POPULATION ); return JpaMetaModelPopulationSetting.parse( setting ); @@ -1420,6 +1459,8 @@ public final class SessionFactoryImpl isClosed = true; + settings.getMultiTableBulkIdStrategy().release( jdbcServices, buildLocalConnectionAccess() ); + Iterator iter = entityPersisters.values().iterator(); while ( iter.hasNext() ) { EntityPersister p = (EntityPersister) iter.next(); diff --git a/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java b/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java index 0fdef1d261..01d3066b1f 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/SessionImpl.java @@ -180,6 +180,8 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, SessionImpl.class.getName()); + private static final boolean tracing = LOG.isTraceEnabled(); + private transient long timestamp; private transient SessionOwner sessionOwner; @@ -309,7 +311,8 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc factory.getStatisticsImplementor().openSession(); } - LOG.debugf( "Opened session at timestamp: %s", timestamp ); + if (tracing) + LOG.tracef( "Opened session at timestamp: %s", timestamp ); } @Override @@ -2509,12 +2512,12 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc // synchronization (this process) was disabled return; } - if ( ! isTransactionInProgress() ) { - // not in a transaction so skip synchronization + if ( entityPersister.getEntityMetamodel().hasImmutableNaturalId() ) { + // only mutable natural-ids need this processing return; } - if ( entityPersister.getEntityMetamodel().hasImmutableNaturalId() ) { - // only mutable natural-ids need this processing + if ( ! 
isTransactionInProgress() ) { + // not in a transaction so skip synchronization return; } @@ -2523,6 +2526,16 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc final Object entity = getPersistenceContext().getEntity( entityKey ); final EntityEntry entry = getPersistenceContext().getEntry( entity ); + if ( entry == null ) { + if ( LOG.isDebugEnabled() ) { + LOG.debug( + "Cached natural-id/pk resolution linked to null EntityEntry in persistence context : " + + MessageHelper.infoString( entityPersister, pk, getFactory() ) + ); + } + continue; + } + if ( !entry.requiresDirtyCheck( entity ) ) { continue; } diff --git a/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java b/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java index 10bafd49e7..19f3e64833 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java @@ -24,6 +24,7 @@ */ package org.hibernate.internal.util; +import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; @@ -66,6 +67,17 @@ public final class StringHelper { return buf.toString(); } + public static String joinWithQualifier(String[] values, String qualifier, String deliminator) { + int length = values.length; + if ( length == 0 ) return ""; + StringBuilder buf = new StringBuilder( length * values[0].length() ) + .append( qualify( qualifier, values[0] ) ); + for ( int i = 1; i < length; i++ ) { + buf.append( deliminator ).append( qualify( qualifier, values[i] ) ); + } + return buf.toString(); + } + public static String join(String seperator, Iterator objects) { StringBuilder buf = new StringBuilder(); if ( objects.hasNext() ) buf.append( objects.next() ); @@ -89,6 +101,15 @@ public final class StringHelper { return buf.toString(); } + public static String repeat(String string, int times, String deliminator) { + StringBuilder buf = new StringBuilder( ( string.length() * times ) + ( deliminator.length() * (times-1) ) ) + .append( string ); + for ( int i = 1; i < times; i++ ) { + buf.append( deliminator ).append( string ); + } + return buf.toString(); + } + public static String repeat(char character, int times) { char[] buffer = new char[times]; Arrays.fill( buffer, character ); @@ -661,4 +682,69 @@ public final class StringHelper { } return unquoted; } + + + public static final String BATCH_ID_PLACEHOLDER = "$$BATCH_ID_PLACEHOLDER$$"; + + public static StringBuilder buildBatchFetchRestrictionFragment( + String alias, + String[] columnNames, + Dialect dialect) { + // the general idea here is to just insert a placeholder that we can easily find later... + if ( columnNames.length == 1 ) { + // non-composite key + return new StringBuilder( StringHelper.qualify( alias, columnNames[0] ) ) + .append( " in (" ).append( BATCH_ID_PLACEHOLDER ).append( ")" ); + } + else { + // composite key - the form to use here depends on what the dialect supports. + if ( dialect.supportsRowValueConstructorSyntaxInInList() ) { + // use : (col1, col2) in ( (?,?), (?,?), ... 
) + StringBuilder builder = new StringBuilder(); + builder.append( "(" ); + boolean firstPass = true; + String deliminator = ""; + for ( String columnName : columnNames ) { + builder.append( deliminator ).append( StringHelper.qualify( alias, columnName ) ); + if ( firstPass ) { + firstPass = false; + deliminator = ","; + } + } + builder.append( ") in (" ); + builder.append( BATCH_ID_PLACEHOLDER ); + builder.append( ")" ); + return builder; + } + else { + // use : ( (col1 = ? and col2 = ?) or (col1 = ? and col2 = ?) or ... ) + // unfortunately most of this building needs to be held off until we know + // the exact number of ids :( + return new StringBuilder( "(" ).append( BATCH_ID_PLACEHOLDER ).append( ")" ); + } + } + } + + public static String expandBatchIdPlaceholder( + String sql, + Serializable[] ids, + String alias, + String[] keyColumnNames, + Dialect dialect) { + if ( keyColumnNames.length == 1 ) { + // non-composite + return StringHelper.replace( sql, BATCH_ID_PLACEHOLDER, repeat( "?", ids.length, "," ) ); + } + else { + // composite + if ( dialect.supportsRowValueConstructorSyntaxInInList() ) { + final String tuple = "(" + StringHelper.repeat( "?", keyColumnNames.length, "," ); + return StringHelper.replace( sql, BATCH_ID_PLACEHOLDER, repeat( tuple, ids.length, "," ) ); + } + else { + final String keyCheck = joinWithQualifier( keyColumnNames, alias, " and " ); + return replace( sql, BATCH_ID_PLACEHOLDER, repeat( keyCheck, ids.length, " or " ) ); + } + } + } } diff --git a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java index f2eb8cb6af..f43e35b6b4 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java @@ -24,6 +24,7 @@ */ package org.hibernate.internal.util.collections; +import java.io.Serializable; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Arrays; @@ -31,6 +32,7 @@ import java.util.Collection; import java.util.Iterator; import java.util.List; +import org.hibernate.HibernateException; import org.hibernate.LockMode; import org.hibernate.LockOptions; import org.hibernate.type.Type; @@ -372,10 +374,43 @@ public final class ArrayHelper { } return true; } + + public static Serializable[] extractNonNull(Serializable[] array) { + final int nonNullCount = countNonNull( array ); + final Serializable[] result = new Serializable[nonNullCount]; + int i = 0; + for ( Serializable element : array ) { + if ( element != null ) { + result[i++] = element; + } + } + if ( i != nonNullCount ) { + throw new HibernateException( "Number of non-null elements varied between iterations" ); + } + return result; + } + + public static int countNonNull(Serializable[] array) { + int i = 0; + for ( Serializable element : array ) { + if ( element != null ) { + i++; + } + } + return i; + } + + public static void main(String... 
args) { + int[] batchSizes = ArrayHelper.getBatchSizes( 32 ); + + System.out.println( "Forward ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" ); + for ( int i = 0; i < batchSizes.length; i++ ) { + System.out.println( "[" + i + "] -> " + batchSizes[i] ); + } + + System.out.println( "Backward ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" ); + for ( int i = batchSizes.length-1; i >= 0; i-- ) { + System.out.println( "[" + i + "] -> " + batchSizes[i] ); + } + } } - - - - - - diff --git a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java index 1d23c246f9..6839eb47c0 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/util/collections/IdentityMap.java @@ -195,10 +195,10 @@ public final class IdentityMap implements Map { } public static final class IdentityMapEntry implements java.util.Map.Entry { - private K key; + private final K key; private V value; - IdentityMapEntry(K key, V value) { + IdentityMapEntry(final K key, final V value) { this.key=key; this.value=value; } @@ -211,33 +211,54 @@ public final class IdentityMap implements Map { return value; } - public V setValue(V value) { + public V setValue(final V value) { V result = this.value; this.value = value; return result; } } + /** + * We need to base the identity on {@link System#identityHashCode(Object)} but + * attempt to lazily initialize and cache this value: being a native invocation + * it is an expensive value to retrieve. + */ public static final class IdentityKey implements Serializable { - private K key; + + private final K key; + private int hash = 0; IdentityKey(K key) { - this.key=key; + this.key = key; } @SuppressWarnings( {"EqualsWhichDoesntCheckParameterClass"}) @Override - public boolean equals(Object other) { + public boolean equals(Object other) { return key == ( (IdentityKey) other ).key; } @Override - public int hashCode() { - return System.identityHashCode(key); + public int hashCode() { + if ( this.hash == 0 ) { + //We consider "zero" as non-initialized value + final int newHash = System.identityHashCode( key ); + if ( newHash == 0 ) { + //So make sure we don't store zeros as it would trigger initialization again: + //any value is fine as long as we're deterministic. 
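+ //Note: the unsynchronized caching is a benign race; concurrent threads may each
+ //compute System.identityHashCode(key), but they all store the same value.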
+ this.hash = -1; + return -1; + } + else { + this.hash = newHash; + return newHash; + } + } + return hash; } @Override - public String toString() { + public String toString() { return key.toString(); } diff --git a/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java b/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java index 23c5a7b01a..673741e8fb 100755 --- a/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/AbstractEntityJoinWalker.java @@ -187,10 +187,7 @@ public abstract class AbstractEntityJoinWalker extends JoinWalker { public abstract String getComment(); @Override - protected boolean isDuplicateAssociation( - final String foreignKeyTable, - final String[] foreignKeyColumns - ) { + protected boolean isDuplicateAssociation(final String foreignKeyTable, final String[] foreignKeyColumns) { //disable a join back to this same association final boolean isSameJoin = persister.getTableName().equals( foreignKeyTable ) && @@ -201,11 +198,11 @@ public abstract class AbstractEntityJoinWalker extends JoinWalker { - protected final Loadable getPersister() { + public final Loadable getPersister() { return persister; } - protected final String getAlias() { + public final String getAlias() { return alias; } diff --git a/hibernate-core/src/main/java/org/hibernate/loader/BatchFetchStyle.java b/hibernate-core/src/main/java/org/hibernate/loader/BatchFetchStyle.java new file mode 100644 index 0000000000..a429408cec --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/loader/BatchFetchStyle.java @@ -0,0 +1,90 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.loader; + +import org.jboss.logging.Logger; + +/** + * Defines the style that should be used to perform batch loading. Which style to use is declared using + * the "{@value org.hibernate.cfg.AvailableSettings#BATCH_FETCH_STYLE}" + * ({@link org.hibernate.cfg.AvailableSettings#BATCH_FETCH_STYLE}) setting + * + * @author Steve Ebersole + */ +public enum BatchFetchStyle { + /** + * The legacy algorithm where we keep a set of pre-built batch sizes based on + * {@link org.hibernate.internal.util.collections.ArrayHelper#getBatchSizes}. Batches are performed + * using the next-smaller pre-built batch size from the number of existing batchable identifiers. + *
<p/>
+ * For example, with a batch-size setting of 32 the pre-built batch sizes would be [32, 16, 10, 9, 8, 7, .., 1]. + * An attempt to batch load 31 identifiers would result in batches of 16, 10, and 5. + */ + LEGACY, + /** + * Still keeps the concept of pre-built batch sizes, but uses the next-bigger batch size and pads the extra + * identifier placeholders. + *
<p/>
+ * Using the same example of a batch-size setting of 32 the pre-built batch sizes would be the same. However, the + * attempt to batch load 31 identifiers would result just a single batch of size 32. The identifiers to load would + * be "padded" (aka, repeated) to make up the difference. + */ + PADDED, + /** + * Dynamically builds its SQL based on the actual number of available ids. Does still limit to the batch-size + * defined on the entity/collection + */ + DYNAMIC; + + private static final Logger log = Logger.getLogger( BatchFetchStyle.class ); + + public static BatchFetchStyle byName(String name) { + return valueOf( name.toUpperCase() ); + } + + public static BatchFetchStyle interpret(Object setting) { + log.tracef( "Interpreting BatchFetchStyle from setting : %s", setting ); + + if ( setting == null ) { + return LEGACY; // as default + } + + if ( BatchFetchStyle.class.isInstance( setting ) ) { + return (BatchFetchStyle) setting; + } + + try { + final BatchFetchStyle byName = byName( setting.toString() ); + if ( byName != null ) { + return byName; + } + } + catch (Exception ignore) { + } + + log.debugf( "Unable to interpret given setting [%s] as BatchFetchStyle", setting ); + + return LEGACY; // again as default. + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java b/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java index 3aaa7240fc..bf2b022679 100755 --- a/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/JoinWalker.java @@ -539,6 +539,34 @@ public class JoinWalker { ); } } + + // if the entity has a composite identifier, see if we need to handle + // its sub-properties separately + final Type idType = persister.getIdentifierType(); + if ( idType.isComponentType() ) { + final CompositeType cidType = (CompositeType) idType; + if ( cidType.isEmbedded() ) { + // we have an embedded composite identifier. Most likely we need to process the composite + // properties separately, although there is an edge case where the identifier is really + // a simple identifier (single value) wrapped in a JPA @IdClass or even in the case of a + // a simple identifier (single value) wrapped in a Hibernate composite type. + // + // We really do not have a built-in method to determine that. However, generally the + // persister would report that there is single, physical identifier property which is + // explicitly at odds with the notion of "embedded composite". So we use that for now + if ( persister.getEntityMetamodel().getIdentifierProperty().isEmbedded() ) { + walkComponentTree( + cidType, + -1, + 0, + persister, + alias, + path, + currentDepth + ); + } + } + } } /** diff --git a/hibernate-core/src/main/java/org/hibernate/loader/Loader.java b/hibernate-core/src/main/java/org/hibernate/loader/Loader.java index 968d0e899b..2370a7c921 100644 --- a/hibernate-core/src/main/java/org/hibernate/loader/Loader.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/Loader.java @@ -119,7 +119,7 @@ public abstract class Loader { * * @return The sql command this loader should use to get its {@link ResultSet}. */ - protected abstract String getSQLString(); + public abstract String getSQLString(); /** * An array of persisters of entity classes contained in each row of results; @@ -256,7 +256,7 @@ public abstract class Loader { * persister from each row of the ResultSet. If an object is supplied, will attempt to * initialize that object. 
If a collection is supplied, attempt to initialize that collection. */ - private List doQueryAndInitializeNonLazyCollections( + public List doQueryAndInitializeNonLazyCollections( final SessionImplementor session, final QueryParameters queryParameters, final boolean returnProxies) throws HibernateException, SQLException { @@ -268,7 +268,7 @@ public abstract class Loader { ); } - private List doQueryAndInitializeNonLazyCollections( + public List doQueryAndInitializeNonLazyCollections( final SessionImplementor session, final QueryParameters queryParameters, final boolean returnProxies, @@ -381,12 +381,21 @@ public abstract class Loader { hydratedObjects, loadedKeys, returnProxies + ); + if ( ! keyToRead.equals( loadedKeys[0] ) ) { + throw new AssertionFailure( + String.format( + "Unexpected key read for row; expected [%s]; actual [%s]", + keyToRead, + loadedKeys[0] ) ); + } if ( result == null ) { result = loaded; } } - while ( keyToRead.equals( loadedKeys[0] ) && resultSet.next() ); + while ( resultSet.next() && + isCurrentRowForSameEntity( keyToRead, 0, resultSet, session ) ); } catch ( SQLException sqle ) { throw factory.getSQLExceptionHelper().convert( @@ -406,6 +415,17 @@ public abstract class Loader { return result; } + private boolean isCurrentRowForSameEntity( + final EntityKey keyToRead, + final int persisterIndex, + final ResultSet resultSet, + final SessionImplementor session) throws SQLException { + EntityKey currentRowKey = getKeyFromResultSet( + persisterIndex, getEntityPersisters()[persisterIndex], null, resultSet, session + ); + return keyToRead.equals( currentRowKey ); + } + /** * Loads a single logical row from the result set moving forward. This is the * processing used from the ScrollableResults where there were collection fetches @@ -1017,7 +1037,16 @@ public abstract class Loader { } } } - + + // Until this entire method is refactored w/ polymorphism, postLoad was + // split off from initializeEntity. It *must* occur after + // endCollectionLoad to ensure the collection is in the + // persistence context. + if ( hydratedObjects!=null ) { + for ( Object hydratedObject : hydratedObjects ) { + TwoPhaseLoad.postLoad( hydratedObject, session, post ); + } + } } private void endCollectionLoad( @@ -1693,8 +1722,17 @@ public abstract class Loader { final QueryParameters queryParameters, final boolean scroll, final SessionImplementor session) throws SQLException { + return executeQueryStatement( getSQLString(), queryParameters, scroll, session ); + } + + protected ResultSet executeQueryStatement( + final String sqlStatement, + final QueryParameters queryParameters, + final boolean scroll, + final SessionImplementor session) throws SQLException { + // Processing query filters. - queryParameters.processFilters( getSQLString(), session ); + queryParameters.processFilters( sqlStatement, session ); // Applying LIMIT clause. 
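Editor's note: the loop change above (checking the key of the next row via isCurrentRowForSameEntity before processing it, rather than after) matters because a joined collection fetch yields several physical result-set rows for one logical entity. A rough, Hibernate-free sketch of that row-grouping idea, with made-up names and data:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

// Illustrative only: consume "rows" while they still belong to the same owner key.
public class SameKeyRowGrouping {
	public static void main(String[] args) {
		// each fake row: { ownerId, collectionElement }
		final List<String[]> rows = Arrays.asList(
				new String[] { "1", "a" },
				new String[] { "1", "b" },
				new String[] { "2", "c" }
		);
		final Iterator<String[]> itr = rows.iterator();
		String[] row = itr.next();
		final String keyToRead = row[0];
		do {
			System.out.println( "row for owner " + keyToRead + ": " + row[1] );
			// advance only while the next row still belongs to the same owner key
			row = itr.hasNext() ? itr.next() : null;
		}
		while ( row != null && keyToRead.equals( row[0] ) );
		// prints the two rows of owner 1, then stops before owner 2's row
	}
}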
final LimitHandler limitHandler = getLimitHandler( diff --git a/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java index 6b41233f10..9d295cbe25 100644 --- a/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/OuterJoinLoader.java @@ -75,7 +75,8 @@ public abstract class OuterJoinLoader extends BasicLoader { return collectionSuffixes; } - protected final String getSQLString() { + @Override + public final String getSQLString() { return sql; } diff --git a/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java b/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java index 413e74d084..e659d2b5de 100644 --- a/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/collection/BatchingCollectionInitializer.java @@ -1,10 +1,10 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * - * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as + * Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Middleware LLC. + * distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU @@ -20,106 +20,35 @@ * Free Software Foundation, Inc. * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA - * */ package org.hibernate.loader.collection; -import java.io.Serializable; -import org.hibernate.HibernateException; -import org.hibernate.MappingException; -import org.hibernate.engine.spi.LoadQueryInfluencers; -import org.hibernate.engine.spi.SessionFactoryImplementor; -import org.hibernate.engine.spi.SessionImplementor; -import org.hibernate.internal.util.collections.ArrayHelper; -import org.hibernate.loader.Loader; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.QueryableCollection; /** - * "Batch" loads collections, using multiple foreign key values in the - * SQL where clause. + * The base contract for loaders capable of performing batch-fetch loading of collections using multiple foreign key + * values in the SQL WHERE clause. 
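Editor's note: the "multiple foreign key values in the SQL WHERE clause" wording in the javadoc above corresponds to a statement of roughly the following shape; the table and column names here are invented purely for illustration.

// Illustrative only: the kind of restriction a batch collection fetch issues
// when three owner ids are batched together (hypothetical names).
public class BatchFetchSqlShape {
	public static void main(String[] args) {
		final Long[] ownerIds = { 1L, 2L, 3L };
		final String sql =
				"select o.owner_id, o.line_no, o.amount "
				+ "from order_lines o "
				+ "where o.owner_id in (?, ?, ?)";	// one placeholder per batched foreign key value
		System.out.println( sql + "  -- bound to " + java.util.Arrays.toString( ownerIds ) );
	}
}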
* + * @author Gavin King + * @author Steve Ebersole + * + * @see BatchingCollectionInitializerBuilder * @see BasicCollectionLoader * @see OneToManyLoader - * @author Gavin King */ -public class BatchingCollectionInitializer implements CollectionInitializer { - private final Loader[] loaders; - private final int[] batchSizes; - private final CollectionPersister collectionPersister; +public abstract class BatchingCollectionInitializer implements CollectionInitializer { + private final QueryableCollection collectionPersister; - public BatchingCollectionInitializer(CollectionPersister collPersister, int[] batchSizes, Loader[] loaders) { - this.loaders = loaders; - this.batchSizes = batchSizes; - this.collectionPersister = collPersister; + public BatchingCollectionInitializer(QueryableCollection collectionPersister) { + this.collectionPersister = collectionPersister; } public CollectionPersister getCollectionPersister() { return collectionPersister; } - public Loader[] getLoaders() { - return loaders; + public QueryableCollection collectionPersister() { + return collectionPersister; } - - public int[] getBatchSizes() { - return batchSizes; - } - - public void initialize(Serializable id, SessionImplementor session) - throws HibernateException { - - Serializable[] batch = session.getPersistenceContext().getBatchFetchQueue() - .getCollectionBatch( collectionPersister, id, batchSizes[0] ); - - for ( int i=0; i 1 ) { - int[] batchSizesToCreate = ArrayHelper.getBatchSizes(maxBatchSize); - Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ]; - for ( int i=0; i 1 ) { - int[] batchSizesToCreate = ArrayHelper.getBatchSizes( maxBatchSize ); - Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ]; - for ( int i=0; i= numberOfIds ) { + indexToUse = i; + } + else { + break; + } + } + + final Serializable[] idsToLoad = new Serializable[ batchSizes[indexToUse] ]; + System.arraycopy( batch, 0, idsToLoad, 0, numberOfIds ); + for ( int i = numberOfIds; i < batchSizes[indexToUse]; i++ ) { + idsToLoad[i] = id; + } + + loaders[indexToUse].loadCollectionBatch( session, idsToLoad, collectionPersister().getKeyType() ); + } + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java index ff48d3ec19..8eaca13cb2 100755 --- a/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/custom/CustomLoader.java @@ -277,7 +277,7 @@ public class CustomLoader extends Loader { } @Override - protected String getSQLString() { + public String getSQLString() { return sql; } diff --git a/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java index ef7469aa8f..bde9a8c24a 100644 --- a/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/entity/BatchingEntityLoader.java @@ -1,10 +1,10 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * - * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as + * Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Middleware LLC. + * distributed under license by Red Hat Inc. 
* * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU @@ -20,135 +20,109 @@ * Free Software Foundation, Inc. * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA - * */ package org.hibernate.loader.entity; import java.io.Serializable; -import java.util.Iterator; +import java.sql.SQLException; +import java.util.Arrays; import java.util.List; -import org.hibernate.LockMode; +import org.jboss.logging.Logger; + import org.hibernate.LockOptions; -import org.hibernate.MappingException; -import org.hibernate.engine.spi.LoadQueryInfluencers; -import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.spi.QueryParameters; import org.hibernate.engine.spi.SessionImplementor; -import org.hibernate.internal.util.collections.ArrayHelper; import org.hibernate.loader.Loader; import org.hibernate.persister.entity.EntityPersister; -import org.hibernate.persister.entity.OuterJoinLoadable; +import org.hibernate.pretty.MessageHelper; import org.hibernate.type.Type; /** - * "Batch" loads entities, using multiple primary key values in the - * SQL where clause. + * The base contract for loaders capable of performing batch-fetch loading of entities using multiple primary key + * values in the SQL WHERE clause. * - * @see EntityLoader * @author Gavin King + * @author Steve Ebersole + * + * @see BatchingEntityLoaderBuilder + * @see UniqueEntityLoader */ -public class BatchingEntityLoader implements UniqueEntityLoader { +public abstract class BatchingEntityLoader implements UniqueEntityLoader { + private static final Logger log = Logger.getLogger( BatchingEntityLoader.class ); - private final Loader[] loaders; - private final int[] batchSizes; private final EntityPersister persister; - private final Type idType; - public BatchingEntityLoader(EntityPersister persister, int[] batchSizes, Loader[] loaders) { - this.batchSizes = batchSizes; - this.loaders = loaders; + public BatchingEntityLoader(EntityPersister persister) { this.persister = persister; - idType = persister.getIdentifierType(); } - private Object getObjectFromList(List results, Serializable id, SessionImplementor session) { - // get the right object from the list ... would it be easier to just call getEntity() ?? 
- Iterator iter = results.iterator(); - while ( iter.hasNext() ) { - Object obj = iter.next(); - final boolean equal = idType.isEqual( + public EntityPersister persister() { + return persister; + } + + @Override + @Deprecated + public Object load(Serializable id, Object optionalObject, SessionImplementor session) { + return load( id, optionalObject, session, LockOptions.NONE ); + } + + protected QueryParameters buildQueryParameters( + Serializable id, + Serializable[] ids, + Object optionalObject, + LockOptions lockOptions) { + Type[] types = new Type[ids.length]; + Arrays.fill( types, persister().getIdentifierType() ); + + QueryParameters qp = new QueryParameters(); + qp.setPositionalParameterTypes( types ); + qp.setPositionalParameterValues( ids ); + qp.setOptionalObject( optionalObject ); + qp.setOptionalEntityName( persister().getEntityName() ); + qp.setOptionalId( id ); + qp.setLockOptions( lockOptions ); + return qp; + } + + protected Object getObjectFromList(List results, Serializable id, SessionImplementor session) { + for ( Object obj : results ) { + final boolean equal = persister.getIdentifierType().isEqual( id, - session.getContextEntityIdentifier(obj), + session.getContextEntityIdentifier( obj ), session.getFactory() ); - if ( equal ) return obj; + if ( equal ) { + return obj; + } } return null; } - /** - * {@inheritDoc} - */ - public Object load(Serializable id, Object optionalObject, SessionImplementor session) { - // this form is deprecated! - return load( id, optionalObject, session, LockOptions.NONE ); - } - - public Object load(Serializable id, Object optionalObject, SessionImplementor session, LockOptions lockOptions) { - Serializable[] batch = session.getPersistenceContext() - .getBatchFetchQueue() - .getEntityBatch( persister, id, batchSizes[0], persister.getEntityMode() ); - - for ( int i=0; i1 ) { - int[] batchSizesToCreate = ArrayHelper.getBatchSizes(maxBatchSize); - Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ]; - for ( int i=0; i1 ) { - int[] batchSizesToCreate = ArrayHelper.getBatchSizes(maxBatchSize); - Loader[] loadersToCreate = new Loader[ batchSizesToCreate.length ]; - for ( int i=0; i= numberOfIds ) { + indexToUse = i; + } + else { + break; + } + } + + final Serializable[] idsToLoad = new Serializable[ batchSizes[indexToUse] ]; + System.arraycopy( batch, 0, idsToLoad, 0, numberOfIds ); + for ( int i = numberOfIds; i < batchSizes[indexToUse]; i++ ) { + idsToLoad[i] = id; + } + + return doBatchLoad( id, loaders[indexToUse], session, idsToLoad, optionalObject, lockOptions ); + } + } + +} diff --git a/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java index 4e70150733..f3e7982ffd 100644 --- a/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/entity/UniqueEntityLoader.java @@ -43,6 +43,7 @@ public interface UniqueEntityLoader { * @deprecated use {@link #load(java.io.Serializable, Object, SessionImplementor, LockOptions)} instead. 
*/ @SuppressWarnings( {"JavaDoc"}) + @Deprecated public Object load(Serializable id, Object optionalObject, SessionImplementor session) throws HibernateException; /** diff --git a/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java b/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java index 7e845d9b04..18ee1d5f95 100644 --- a/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java +++ b/hibernate-core/src/main/java/org/hibernate/loader/hql/QueryLoader.java @@ -239,7 +239,7 @@ public class QueryLoader extends BasicLoader { /** * The SQL query string to be called. */ - protected String getSQLString() { + public String getSQLString() { return queryTranslator.getSQLString(); } diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/Column.java b/hibernate-core/src/main/java/org/hibernate/mapping/Column.java index 44dfc49d54..569b08e30d 100644 --- a/hibernate-core/src/main/java/org/hibernate/mapping/Column.java +++ b/hibernate-core/src/main/java/org/hibernate/mapping/Column.java @@ -343,7 +343,8 @@ public class Column implements Selectable, Serializable, Cloneable { /** * Shallow copy, the value is not copied */ - protected Object clone() { + @Override + public Column clone() { Column copy = new Column(); copy.setLength( length ); copy.setScale( scale ); diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java b/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java index 851f53e724..e8a299dd48 100644 --- a/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java +++ b/hibernate-core/src/main/java/org/hibernate/mapping/PersistentClass.java @@ -768,14 +768,14 @@ public abstract class PersistentClass implements Serializable, Filterable, MetaA } public void prepareTemporaryTables(Mapping mapping, Dialect dialect) { + temporaryIdTableName = dialect.generateTemporaryTableName( getTable().getName() ); if ( dialect.supportsTemporaryTables() ) { - temporaryIdTableName = dialect.generateTemporaryTableName( getTable().getName() ); Table table = new Table(); table.setName( temporaryIdTableName ); Iterator itr = getTable().getPrimaryKey().getColumnIterator(); while( itr.hasNext() ) { Column column = (Column) itr.next(); - table.addColumn( (Column) column.clone() ); + table.addColumn( column.clone() ); } temporaryIdTableDDL = table.sqlTemporaryTableCreateString( dialect, mapping ); } diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/Property.java b/hibernate-core/src/main/java/org/hibernate/mapping/Property.java index 6cb91d0f4e..5e0f9c5317 100644 --- a/hibernate-core/src/main/java/org/hibernate/mapping/Property.java +++ b/hibernate-core/src/main/java/org/hibernate/mapping/Property.java @@ -62,6 +62,7 @@ public class Property implements Serializable, MetaAttributable { private java.util.Map metaAttributes; private PersistentClass persistentClass; private boolean naturalIdentifier; + private boolean lob; public boolean isBackRef() { return false; @@ -343,4 +344,12 @@ public class Property implements Serializable, MetaAttributable { this.naturalIdentifier = naturalIdentifier; } + public boolean isLob() { + return lob; + } + + public void setLob(boolean lob) { + this.lob = lob; + } + } diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java b/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java index 99f3b003dd..488fe357c7 100644 --- a/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java +++ 
b/hibernate-core/src/main/java/org/hibernate/mapping/SimpleValue.java @@ -24,7 +24,6 @@ package org.hibernate.mapping; import java.lang.annotation.Annotation; -import java.lang.reflect.Field; import java.lang.reflect.TypeVariable; import java.sql.CallableStatement; import java.sql.PreparedStatement; @@ -34,13 +33,12 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Properties; -import javax.persistence.AttributeConverter; -import org.jboss.logging.Logger; +import javax.persistence.AttributeConverter; import org.hibernate.FetchMode; import org.hibernate.MappingException; -import org.hibernate.cfg.AccessType; +import org.hibernate.annotations.common.reflection.XProperty; import org.hibernate.cfg.AttributeConverterDefinition; import org.hibernate.cfg.Environment; import org.hibernate.cfg.Mappings; @@ -51,7 +49,6 @@ import org.hibernate.id.IdentityGenerator; import org.hibernate.id.PersistentIdentifierGenerator; import org.hibernate.id.factory.IdentifierGeneratorFactory; import org.hibernate.internal.util.ReflectHelper; -import org.hibernate.property.DirectPropertyAccessor; import org.hibernate.type.AbstractSingleColumnStandardBasicType; import org.hibernate.type.Type; import org.hibernate.type.descriptor.ValueBinder; @@ -65,6 +62,7 @@ import org.hibernate.type.descriptor.sql.JdbcTypeJavaClassMappings; import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; import org.hibernate.type.descriptor.sql.SqlTypeDescriptorRegistry; import org.hibernate.usertype.DynamicParameterizedType; +import org.jboss.logging.Logger; /** * Any value that maps to columns. @@ -527,29 +525,26 @@ public class SimpleValue implements KeyValue { columnsNames[i] = ( (Column) columns.get( i ) ).getName(); } - AccessType accessType = AccessType.getAccessStrategy( typeParameters - .getProperty( DynamicParameterizedType.ACCESS_TYPE ) ); - final Class classEntity = ReflectHelper.classForName( typeParameters - .getProperty( DynamicParameterizedType.ENTITY ) ); - final String propertyName = typeParameters.getProperty( DynamicParameterizedType.PROPERTY ); - - Annotation[] annotations; - if ( accessType == AccessType.FIELD ) { - annotations = ( (Field) new DirectPropertyAccessor().getGetter( classEntity, propertyName ).getMember() ) - .getAnnotations(); - - } - else { - annotations = ReflectHelper.getGetter( classEntity, propertyName ).getMethod().getAnnotations(); - } + final XProperty xProperty = (XProperty) typeParameters.get( DynamicParameterizedType.XPROPERTY ); + // todo : not sure this works for handling @MapKeyEnumerated + final Annotation[] annotations = xProperty == null + ? 
null + : xProperty.getAnnotations(); typeParameters.put( DynamicParameterizedType.PARAMETER_TYPE, - new ParameterTypeImpl( ReflectHelper.classForName( typeParameters - .getProperty( DynamicParameterizedType.RETURNED_CLASS ) ), annotations, table.getCatalog(), - table.getSchema(), table.getName(), Boolean.valueOf( typeParameters - .getProperty( DynamicParameterizedType.IS_PRIMARY_KEY ) ), columnsNames ) ); - + new ParameterTypeImpl( + ReflectHelper.classForName( + typeParameters.getProperty( DynamicParameterizedType.RETURNED_CLASS ) + ), + annotations, + table.getCatalog(), + table.getSchema(), + table.getName(), + Boolean.valueOf( typeParameters.getProperty( DynamicParameterizedType.IS_PRIMARY_KEY ) ), + columnsNames + ) + ); } catch ( ClassNotFoundException cnfe ) { throw new MappingException( "Could not create DynamicParameterizedType for type: " + typeName, cnfe ); @@ -612,4 +607,4 @@ public class SimpleValue implements KeyValue { return columns; } } -} \ No newline at end of file +} diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/Table.java b/hibernate-core/src/main/java/org/hibernate/mapping/Table.java index 4c2e96903d..3b34475107 100644 --- a/hibernate-core/src/main/java/org/hibernate/mapping/Table.java +++ b/hibernate-core/src/main/java/org/hibernate/mapping/Table.java @@ -328,6 +328,36 @@ public class Table implements RelationalModel, Serializable { && uniqueKey.getColumns().containsAll( primaryKey.getColumns() ); } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + + ((catalog == null) ? 0 : isCatalogQuoted() ? catalog.hashCode() : catalog.toLowerCase().hashCode()); + result = prime * result + ((name == null) ? 0 : isQuoted() ? name.hashCode() : name.toLowerCase().hashCode()); + result = prime * result + + ((schema == null) ? 0 : isSchemaQuoted() ? schema.hashCode() : schema.toLowerCase().hashCode()); + return result; + } + + @Override + public boolean equals(Object object) { + return object instanceof Table && equals((Table) object); + } + + public boolean equals(Table table) { + if (null == table) { + return false; + } + if (this == table) { + return true; + } + + return isQuoted() ? name.equals(table.getName()) : name.equalsIgnoreCase(table.getName()) + && ((schema == null && table.getSchema() != null) ? false : (schema == null) ? true : isSchemaQuoted() ? schema.equals(table.getSchema()) : schema.equalsIgnoreCase(table.getSchema())) + && ((catalog == null && table.getCatalog() != null) ? false : (catalog == null) ? true : isCatalogQuoted() ? 
catalog.equals(table.getCatalog()) : catalog.equalsIgnoreCase(table.getCatalog())); + } + public void validateColumns(Dialect dialect, Mapping mapping, TableMetadata tableInfo) { Iterator iter = getColumnIterator(); while ( iter.hasNext() ) { @@ -394,7 +424,7 @@ public class Table implements RelationalModel, Serializable { boolean useUniqueConstraint = column.isUnique() && dialect.supportsUnique() && - ( !column.isNullable() || dialect.supportsNotNullUnique() ); + ( column.isNullable() || dialect.supportsNotNullUnique() ); if ( useUniqueConstraint ) { alter.append( " unique" ); } @@ -495,7 +525,7 @@ public class Table implements RelationalModel, Serializable { } boolean useUniqueConstraint = col.isUnique() && - ( !col.isNullable() || dialect.supportsNotNullUnique() ); + ( col.isNullable() || dialect.supportsNotNullUnique() ); if ( useUniqueConstraint ) { if ( dialect.supportsUnique() ) { buf.append( " unique" ); diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java index b64a7633bb..0094b30460 100644 --- a/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java +++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/AbstractCollectionPersister.java @@ -1569,7 +1569,7 @@ public abstract class AbstractCollectionPersister if ( LOG.isDebugEnabled() ) { LOG.debugf( "Inserting collection: %s", - MessageHelper.collectionInfoString( this, id, getFactory() ) ); + MessageHelper.collectionInfoString( this, collection, id, session ) ); } try { @@ -1662,7 +1662,7 @@ public abstract class AbstractCollectionPersister throw sqlExceptionHelper.convert( sqle, "could not insert collection: " + - MessageHelper.collectionInfoString( this, id, getFactory() ), + MessageHelper.collectionInfoString( this, collection, id, session ), getSQLInsertRowString() ); } @@ -1682,7 +1682,7 @@ public abstract class AbstractCollectionPersister if ( LOG.isDebugEnabled() ) { LOG.debugf( "Deleting rows of collection: %s", - MessageHelper.collectionInfoString( this, id, getFactory() ) ); + MessageHelper.collectionInfoString( this, collection, id, session ) ); } boolean deleteByIndex = !isOneToMany() && hasIndex && !indexContainsFormula; @@ -1770,7 +1770,7 @@ public abstract class AbstractCollectionPersister throw sqlExceptionHelper.convert( sqle, "could not delete collection rows: " + - MessageHelper.collectionInfoString( this, id, getFactory() ), + MessageHelper.collectionInfoString( this, collection, id, session ), getSQLDeleteRowString() ); } @@ -1789,7 +1789,7 @@ public abstract class AbstractCollectionPersister if ( !isInverse && isRowInsertEnabled() ) { if ( LOG.isDebugEnabled() ) LOG.debugf( "Inserting rows of collection: %s", - MessageHelper.collectionInfoString( this, id, getFactory() ) ); + MessageHelper.collectionInfoString( this, collection, id, session ) ); try { // insert all the new entries @@ -1869,7 +1869,7 @@ public abstract class AbstractCollectionPersister throw sqlExceptionHelper.convert( sqle, "could not insert collection rows: " + - MessageHelper.collectionInfoString( this, id, getFactory() ), + MessageHelper.collectionInfoString( this, collection, id, session ), getSQLInsertRowString() ); } @@ -2267,6 +2267,10 @@ public abstract class AbstractCollectionPersister return initializer; } + public int getBatchSize() { + return batchSize; + } + private class StandardOrderByAliasResolver implements 
OrderByAliasResolver { private final String rootAlias; @@ -2287,4 +2291,5 @@ public abstract class AbstractCollectionPersister } public abstract FilterAliasGenerator getFilterAliasGenerator(final String rootAlias); + } diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java index 3dfafd9b73..9af95ba2c2 100644 --- a/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java +++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/BasicCollectionPersister.java @@ -45,6 +45,7 @@ import org.hibernate.internal.util.collections.ArrayHelper; import org.hibernate.jdbc.Expectation; import org.hibernate.jdbc.Expectations; import org.hibernate.loader.collection.BatchingCollectionInitializer; +import org.hibernate.loader.collection.BatchingCollectionInitializerBuilder; import org.hibernate.loader.collection.CollectionInitializer; import org.hibernate.loader.collection.SubselectCollectionLoader; import org.hibernate.mapping.Collection; @@ -294,7 +295,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister { catch ( SQLException sqle ) { throw getSQLExceptionHelper().convert( sqle, - "could not update collection rows: " + MessageHelper.collectionInfoString( this, id, getFactory() ), + "could not update collection rows: " + MessageHelper.collectionInfoString( this, collection, id, session ), getSQLUpdateRowString() ); } @@ -342,7 +343,8 @@ public class BasicCollectionPersister extends AbstractCollectionPersister { @Override protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers) throws MappingException { - return BatchingCollectionInitializer.createBatchingCollectionInitializer( this, batchSize, getFactory(), loadQueryInfluencers ); + return BatchingCollectionInitializerBuilder.getBuilder( getFactory() ) + .createBatchingCollectionInitializer( this, batchSize, getFactory(), loadQueryInfluencers ); } @Override public String fromJoinFragment(String alias, boolean innerJoin, boolean includeSubclasses) { diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java index 4abbd97d50..81900d648e 100644 --- a/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java +++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java @@ -299,4 +299,5 @@ public interface CollectionPersister { public boolean indexExists(Serializable key, Object index, SessionImplementor session); public boolean elementExists(Serializable key, Object element, SessionImplementor session); public Object getElementByIndex(Serializable key, Object index, SessionImplementor session, Object owner); + public int getBatchSize(); } diff --git a/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java index fa010c168a..88e9618a48 100644 --- a/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java +++ b/hibernate-core/src/main/java/org/hibernate/persister/collection/OneToManyPersister.java @@ -44,6 +44,7 @@ import org.hibernate.internal.util.collections.ArrayHelper; import org.hibernate.jdbc.Expectation; import org.hibernate.jdbc.Expectations; 
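Editor's note: earlier in this patch, Table gains equals()/hashCode() implementations in which quoted identifiers compare case-sensitively while unquoted identifiers compare case-insensitively. A tiny, Hibernate-free sketch of that rule (names are made up):

// Illustrative only: the comparison rule used by the new Table#equals/hashCode pair.
public class IdentifierComparisonSketch {
	static boolean sameName(String a, String b, boolean quoted) {
		return quoted ? a.equals( b ) : a.equalsIgnoreCase( b );
	}

	public static void main(String[] args) {
		System.out.println( sameName( "ORDERS", "orders", false ) );	// true  - unquoted
		System.out.println( sameName( "ORDERS", "orders", true ) );	// false - quoted
	}
}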
import org.hibernate.loader.collection.BatchingCollectionInitializer; +import org.hibernate.loader.collection.BatchingCollectionInitializerBuilder; import org.hibernate.loader.collection.CollectionInitializer; import org.hibernate.loader.collection.SubselectOneToManyLoader; import org.hibernate.loader.entity.CollectionElementLoader; @@ -352,7 +353,7 @@ public class OneToManyPersister extends AbstractCollectionPersister { throw getFactory().getSQLExceptionHelper().convert( sqle, "could not update collection rows: " + - MessageHelper.collectionInfoString( this, id, getFactory() ), + MessageHelper.collectionInfoString( this, collection, id, session ), getSQLInsertRowString() ); } @@ -384,7 +385,8 @@ public class OneToManyPersister extends AbstractCollectionPersister { @Override protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers) throws MappingException { - return BatchingCollectionInitializer.createBatchingOneToManyInitializer( this, batchSize, getFactory(), loadQueryInfluencers ); + return BatchingCollectionInitializerBuilder.getBuilder( getFactory() ) + .createBatchingOneToManyInitializer( this, batchSize, getFactory(), loadQueryInfluencers ); } public String fromJoinFragment(String alias, diff --git a/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java index de6fee5548..a96f091e8f 100644 --- a/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java +++ b/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java @@ -92,6 +92,7 @@ import org.hibernate.jdbc.Expectation; import org.hibernate.jdbc.Expectations; import org.hibernate.jdbc.TooManyRowsAffectedException; import org.hibernate.loader.entity.BatchingEntityLoader; +import org.hibernate.loader.entity.BatchingEntityLoaderBuilder; import org.hibernate.loader.entity.CascadeEntityLoader; import org.hibernate.loader.entity.EntityLoader; import org.hibernate.loader.entity.UniqueEntityLoader; @@ -192,6 +193,8 @@ public abstract class AbstractEntityPersister private final boolean[][] propertyColumnInsertable; private final boolean[] propertyUniqueness; private final boolean[] propertySelectable; + + private final List lobProperties = new ArrayList(); //information about lazy properties of this class private final String[] lazyPropertyNames; @@ -638,6 +641,10 @@ public abstract class AbstractEntityPersister propertySelectable[i] = prop.isSelectable(); propertyUniqueness[i] = prop.getValue().isAlternateUniqueKey(); + + if (prop.isLob() && getFactory().getDialect().forceLobAsLastValue() ) { + lobProperties.add( i ); + } i++; @@ -2521,26 +2528,16 @@ public abstract class AbstractEntityPersister LockMode lockMode, LoadQueryInfluencers loadQueryInfluencers) throws MappingException { //TODO: disable batch loading if lockMode > READ? - return BatchingEntityLoader.createBatchingEntityLoader( - this, - batchSize, - lockMode, - getFactory(), - loadQueryInfluencers - ); + return BatchingEntityLoaderBuilder.getBuilder( getFactory() ) + .buildLoader( this, batchSize, lockMode, getFactory(), loadQueryInfluencers ); } protected UniqueEntityLoader createEntityLoader( LockOptions lockOptions, LoadQueryInfluencers loadQueryInfluencers) throws MappingException { //TODO: disable batch loading if lockMode > READ? 
- return BatchingEntityLoader.createBatchingEntityLoader( - this, - batchSize, - lockOptions, - getFactory(), - loadQueryInfluencers - ); + return BatchingEntityLoaderBuilder.getBuilder( getFactory() ) + .buildLoader( this, batchSize, lockOptions, getFactory(), loadQueryInfluencers ); } protected UniqueEntityLoader createEntityLoader(LockMode lockMode) throws MappingException { @@ -2597,12 +2594,26 @@ public abstract class AbstractEntityPersister boolean hasColumns = false; for ( int i = 0; i < entityMetamodel.getPropertySpan(); i++ ) { - if ( includeProperty[i] && isPropertyOfTable( i, j ) ) { + if ( includeProperty[i] && isPropertyOfTable( i, j ) + && !lobProperties.contains( i ) ) { // this is a property of the table, which we are updating - update.addColumns( getPropertyColumnNames(i), propertyColumnUpdateable[i], propertyColumnWriters[i] ); + update.addColumns( getPropertyColumnNames(i), + propertyColumnUpdateable[i], propertyColumnWriters[i] ); hasColumns = hasColumns || getPropertyColumnSpan( i ) > 0; } } + + // HHH-4635 + // Oracle expects all Lob properties to be last in inserts + // and updates. Insert them at the end. + for ( int i : lobProperties ) { + if ( includeProperty[i] && isPropertyOfTable( i, j ) ) { + // this property belongs on the table and is to be inserted + update.addColumns( getPropertyColumnNames(i), + propertyColumnUpdateable[i], propertyColumnWriters[i] ); + hasColumns = true; + } + } if ( j == 0 && isVersioned() && entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.VERSION ) { // this is the root (versioned) table, and we are using version-based @@ -2668,7 +2679,8 @@ public abstract class AbstractEntityPersister /** * Generate the SQL that inserts a row */ - protected String generateInsertString(boolean identityInsert, boolean[] includeProperty, int j) { + protected String generateInsertString(boolean identityInsert, + boolean[] includeProperty, int j) { // todo : remove the identityInsert param and variations; // identity-insert strings are now generated from generateIdentityInsertString() @@ -2678,9 +2690,13 @@ public abstract class AbstractEntityPersister // add normal properties for ( int i = 0; i < entityMetamodel.getPropertySpan(); i++ ) { - if ( includeProperty[i] && isPropertyOfTable( i, j ) ) { + + if ( includeProperty[i] && isPropertyOfTable( i, j ) + && !lobProperties.contains( i ) ) { // this property belongs on the table and is to be inserted - insert.addColumns( getPropertyColumnNames(i), propertyColumnInsertable[i], propertyColumnWriters[i] ); + insert.addColumns( getPropertyColumnNames(i), + propertyColumnInsertable[i], + propertyColumnWriters[i] ); } } @@ -2700,6 +2716,18 @@ public abstract class AbstractEntityPersister if ( getFactory().getSettings().isCommentsEnabled() ) { insert.setComment( "insert " + getEntityName() ); } + + // HHH-4635 + // Oracle expects all Lob properties to be last in inserts + // and updates. Insert them at the end. 
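Editor's note: the HHH-4635 comments in this hunk describe the reordering in prose; the loops over lobProperties that follow do the actual work. As a small, self-contained illustration of the net effect (entity and column names are made up), LOB columns simply end up after all other columns:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative only: non-LOB columns first, LOB columns appended last,
// which is the ordering Oracle expects (see HHH-4635).
public class LobLastOrderingSketch {
	static List<String> orderColumns(List<String> columns, List<String> lobColumns) {
		final List<String> ordered = new ArrayList<String>();
		for ( String column : columns ) {
			if ( !lobColumns.contains( column ) ) {
				ordered.add( column );
			}
		}
		ordered.addAll( lobColumns );	// LOBs go at the end
		return ordered;
	}

	public static void main(String[] args) {
		final List<String> columns = Arrays.asList( "id", "name", "document", "flag" );
		final List<String> lobColumns = Arrays.asList( "document" );
		System.out.println( orderColumns( columns, lobColumns ) );	// [id, name, flag, document]
	}
}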
+ for ( int i : lobProperties ) { + if ( includeProperty[i] && isPropertyOfTable( i, j ) ) { + // this property belongs on the table and is to be inserted + insert.addColumns( getPropertyColumnNames(i), + propertyColumnInsertable[i], + propertyColumnWriters[i] ); + } + } String result = insert.toStatementString(); @@ -2767,8 +2795,9 @@ public abstract class AbstractEntityPersister boolean[][] includeColumns, int j, PreparedStatement st, - SessionImplementor session) throws HibernateException, SQLException { - return dehydrate( id, fields, null, includeProperty, includeColumns, j, st, session, 1 ); + SessionImplementor session, + boolean isUpdate) throws HibernateException, SQLException { + return dehydrate( id, fields, null, includeProperty, includeColumns, j, st, session, 1, isUpdate ); } /** @@ -2783,32 +2812,58 @@ public abstract class AbstractEntityPersister final int j, final PreparedStatement ps, final SessionImplementor session, - int index) throws SQLException, HibernateException { + int index, + boolean isUpdate ) throws SQLException, HibernateException { if ( LOG.isTraceEnabled() ) { LOG.tracev( "Dehydrating entity: {0}", MessageHelper.infoString( this, id, getFactory() ) ); } for ( int i = 0; i < entityMetamodel.getPropertySpan(); i++ ) { - if ( includeProperty[i] && isPropertyOfTable( i, j ) ) { + if ( includeProperty[i] && isPropertyOfTable( i, j ) + && !lobProperties.contains( i )) { getPropertyTypes()[i].nullSafeSet( ps, fields[i], index, includeColumns[i], session ); - //index += getPropertyColumnSpan( i ); index += ArrayHelper.countTrue( includeColumns[i] ); //TODO: this is kinda slow... } } - - if ( rowId != null ) { - ps.setObject( index, rowId ); - index += 1; + + if ( !isUpdate ) { + index += dehydrateId( id, rowId, ps, session, index ); } - else if ( id != null ) { - getIdentifierType().nullSafeSet( ps, id, index, session ); - index += getIdentifierColumnSpan(); + + // HHH-4635 + // Oracle expects all Lob properties to be last in inserts + // and updates. Insert them at the end. + for ( int i : lobProperties ) { + if ( includeProperty[i] && isPropertyOfTable( i, j ) ) { + getPropertyTypes()[i].nullSafeSet( ps, fields[i], index, includeColumns[i], session ); + index += ArrayHelper.countTrue( includeColumns[i] ); //TODO: this is kinda slow... + } + } + + if ( isUpdate ) { + index += dehydrateId( id, rowId, ps, session, index ); } return index; } + + private int dehydrateId( + final Serializable id, + final Object rowId, + final PreparedStatement ps, + final SessionImplementor session, + int index ) throws SQLException { + if ( rowId != null ) { + ps.setObject( index, rowId ); + return 1; + } else if ( id != null ) { + getIdentifierType().nullSafeSet( ps, id, index, session ); + return getIdentifierColumnSpan(); + } + return 0; + } /** * Unmarshall the fields of a persistent instance from a result set, @@ -2949,7 +3004,7 @@ public abstract class AbstractEntityPersister Binder binder = new Binder() { public void bindValues(PreparedStatement ps) throws SQLException { - dehydrate( null, fields, notNull, propertyColumnInsertable, 0, ps, session ); + dehydrate( null, fields, notNull, propertyColumnInsertable, 0, ps, session, false ); } public Object getEntity() { return object; @@ -3046,7 +3101,7 @@ public abstract class AbstractEntityPersister // Write the values of fields onto the prepared statement - we MUST use the state at the time the // insert was issued (cos of foreign key constraints). 
Not necessarily the object's current state - dehydrate( id, fields, null, notNull, propertyColumnInsertable, j, insert, session, index ); + dehydrate( id, fields, null, notNull, propertyColumnInsertable, j, insert, session, index, false ); if ( useBatch ) { session.getTransactionCoordinator().getJdbcCoordinator().getBatch( inserBatchKey ).addToBatch(); @@ -3174,7 +3229,7 @@ public abstract class AbstractEntityPersister index+= expectation.prepare( update ); //Now write the values of fields onto the prepared statement - index = dehydrate( id, fields, rowId, includeProperty, propertyColumnUpdateable, j, update, session, index ); + index = dehydrate( id, fields, rowId, includeProperty, propertyColumnUpdateable, j, update, session, index, true ); // Write any appropriate versioning conditional parameters if ( useVersion && entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.VERSION ) { diff --git a/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java b/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java index 7da348880d..f519ecdc41 100644 --- a/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java @@ -25,7 +25,9 @@ package org.hibernate.pretty; import java.io.Serializable; +import org.hibernate.collection.spi.PersistentCollection; import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.entity.EntityPersister; import org.hibernate.type.Type; @@ -234,7 +236,52 @@ public final class MessageHelper { // collections ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Generate an info message string relating to a particular managed + * collection. Attempts to intelligently handle property-refs issues + * where the collection key is not the same as the owner key. + * + * @param persister The persister for the collection + * @param collection The collection itself + * @param collectionKey The collection key + * @param session The session + * @return An info string, in the form [Foo.bars#1] + */ + public static String collectionInfoString( + CollectionPersister persister, + PersistentCollection collection, + Serializable collectionKey, + SessionImplementor session ) { + + StringBuilder s = new StringBuilder(); + s.append( '[' ); + if ( persister == null ) { + s.append( "" ); + } + else { + s.append( persister.getRole() ); + s.append( '#' ); + + Type ownerIdentifierType = persister.getOwnerEntityPersister() + .getIdentifierType(); + Serializable ownerKey; + // TODO: Is it redundant to attempt to use the collectionKey, + // or is always using the owner id sufficient? + if ( collectionKey.getClass().isAssignableFrom( + ownerIdentifierType.getReturnedClass() ) ) { + ownerKey = collectionKey; + } else { + ownerKey = session.getPersistenceContext() + .getEntry( collection.getOwner() ).getId(); + } + s.append( ownerIdentifierType.toLoggableString( + ownerKey, session.getFactory() ) ); + } + s.append( ']' ); + return s.toString(); + } /** * Generate an info message string relating to a series of managed @@ -258,11 +305,7 @@ public final class MessageHelper { s.append( persister.getRole() ); s.append( "#<" ); for ( int i = 0; i < ids.length; i++ ) { - // Need to use the identifier type of the collection owner - // since the incoming is value is actually the owner's id. 
- // Using the collection's key type causes problems with - // property-ref keys... - s.append( persister.getOwnerEntityPersister().getIdentifierType().toLoggableString( ids[i], factory ) ); + addIdToCollectionInfoString( persister, ids[i], factory, s ); if ( i < ids.length-1 ) { s.append( ", " ); } @@ -299,17 +342,37 @@ public final class MessageHelper { s.append( "" ); } else { - // Need to use the identifier type of the collection owner - // since the incoming is value is actually the owner's id. - // Using the collection's key type causes problems with - // property-ref keys... - s.append( persister.getOwnerEntityPersister().getIdentifierType().toLoggableString( id, factory ) ); + addIdToCollectionInfoString( persister, id, factory, s ); } } s.append( ']' ); return s.toString(); } + + private static void addIdToCollectionInfoString( + CollectionPersister persister, + Serializable id, + SessionFactoryImplementor factory, + StringBuilder s ) { + // Need to use the identifier type of the collection owner + // since the incoming is value is actually the owner's id. + // Using the collection's key type causes problems with + // property-ref keys. + // Also need to check that the expected identifier type matches + // the given ID. Due to property-ref keys, the collection key + // may not be the owner key. + Type ownerIdentifierType = persister.getOwnerEntityPersister() + .getIdentifierType(); + if ( id.getClass().isAssignableFrom( + ownerIdentifierType.getReturnedClass() ) ) { + s.append( ownerIdentifierType.toLoggableString( id, factory ) ); + } else { + // TODO: This is a crappy backup if a property-ref is used. + // If the reference is an object w/o toString(), this isn't going to work. + s.append( id.toString() ); + } + } /** * Generate an info message string relating to a particular managed diff --git a/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java b/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java index 5d06d3c2ec..bf4f94c484 100755 --- a/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java +++ b/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java @@ -26,19 +26,19 @@ package org.hibernate.proxy; import java.io.Serializable; import javax.naming.NamingException; -import org.jboss.logging.Logger; +import javax.naming.NamingException; import org.hibernate.HibernateException; import org.hibernate.LazyInitializationException; import org.hibernate.Session; import org.hibernate.SessionException; import org.hibernate.TransientObjectException; -import org.hibernate.cfg.AvailableSettings; import org.hibernate.engine.spi.EntityKey; import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.internal.SessionFactoryRegistry; import org.hibernate.persister.entity.EntityPersister; +import org.jboss.logging.Logger; /** * Convenience base class for lazy initialization handlers. Centralizes the basic plumbing of doing lazy @@ -191,6 +191,22 @@ public abstract class AbstractLazyInitializer implements LazyInitializer { SessionFactoryImplementor sf = (SessionFactoryImplementor) SessionFactoryRegistry.INSTANCE.getSessionFactory( sessionFactoryUuid ); SessionImplementor session = (SessionImplementor) sf.openSession(); + + // TODO: On the next major release, add an + // 'isJTA' or 'getTransactionFactory' method to Session. 
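Editor's note: the temporary-session handling in this hunk only runs when lazy state may be initialized outside the original session, which is switched on through the ENABLE_LAZY_LOAD_NO_TRANS setting referenced a few lines below. A minimal configuration sketch, assuming the usual Configuration bootstrap (building the SessionFactory is omitted):

import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;

// Illustrative only: enabling out-of-transaction lazy initialization,
// the behaviour guarded by the isJTA/temporary-session code in this hunk.
public class LazyLoadNoTransConfig {
	public static void main(String[] args) {
		final Configuration cfg = new Configuration();
		cfg.setProperty( AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS, "true" );
		// ... add mappings and build the SessionFactory as usual
	}
}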
+ boolean isJTA = session.getTransactionCoordinator() + .getTransactionContext().getTransactionEnvironment() + .getTransactionFactory() + .compatibleWithJtaSynchronization(); + + if ( !isJTA ) { + // Explicitly handle the transactions only if we're not in + // a JTA environment. A lazy loading temporary session can + // be created even if a current session and transaction are + // open (ex: session.clear() was used). We must prevent + // multiple transactions. + ( ( Session) session ).beginTransaction(); + } try { target = session.immediateLoad( entityName, id ); @@ -198,6 +214,9 @@ public abstract class AbstractLazyInitializer implements LazyInitializer { finally { // make sure the just opened temp session gets closed! try { + if ( !isJTA ) { + ( ( Session) session ).getTransaction().commit(); + } ( (Session) session ).close(); } catch (Exception e) { @@ -224,12 +243,7 @@ public abstract class AbstractLazyInitializer implements LazyInitializer { protected void prepareForPossibleSpecialSpecjInitialization() { if ( session != null ) { - specjLazyLoad = - Boolean.parseBoolean( - session.getFactory() - .getProperties() - .getProperty( AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS ) - ); + specjLazyLoad = session.getFactory().getSettings().isInitializeLazyStateOutsideTransactionsEnabled(); if ( specjLazyLoad && sessionFactoryUuid == null ) { try { diff --git a/hibernate-core/src/main/java/org/hibernate/sql/SelectValues.java b/hibernate-core/src/main/java/org/hibernate/sql/SelectValues.java new file mode 100644 index 0000000000..10921037e4 --- /dev/null +++ b/hibernate-core/src/main/java/org/hibernate/sql/SelectValues.java @@ -0,0 +1,121 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.sql; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Set; + +import org.jboss.logging.Logger; + +import org.hibernate.dialect.Dialect; + +/** + * Models a SELECT values lists. 
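Editor's note: going by the constructor and methods declared below, the new SelectValues helper would be used roughly as follows; the H2Dialect choice is only for illustration.

import org.hibernate.dialect.H2Dialect;
import org.hibernate.sql.SelectValues;

// Illustrative only: rendering a select list with the helper added in this patch.
public class SelectValuesUsage {
	public static void main(String[] args) {
		final String rendered = new SelectValues( new H2Dialect() )
				.addColumn( "e", "id", "id_1" )
				.addColumn( "e", "name", "name_1" )
				.addParameter( java.sql.Types.INTEGER, 10 )
				.render();
		// e.id as id_1, e.name as name_1, ?   (or cast(? ...) on dialects that require it)
		System.out.println( rendered );
	}
}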
Eventually, rather than Strings, pass in the Column/Formula representations (something + * like {@link org.hibernate.sql.ordering.antlr.ColumnReference}/{@link org.hibernate.sql.ordering.antlr.FormulaReference} + * + * @author Steve Ebersole + */ +public class SelectValues { + private static final Logger log = Logger.getLogger( SelectValues.class ); + + private static class SelectValue { + private final String qualifier; + private final String value; + private final String alias; + + private SelectValue(String qualifier, String value, String alias) { + this.qualifier = qualifier; + this.value = value; + this.alias = alias; + } + } + + private final Dialect dialect; + private final ArrayList selectValueList = new ArrayList(); + + public SelectValues(Dialect dialect) { + this.dialect = dialect; + } + + public SelectValues addColumns(String qualifier, String[] columnNames, String[] columnAliases) { + for ( int i = 0; i < columnNames.length; i++ ) { + if ( columnNames[i] != null ) { + addColumn( qualifier, columnNames[i], columnAliases[i] ); + } + } + return this; + } + + public SelectValues addColumn(String qualifier, String columnName, String columnAlias) { + selectValueList.add( new SelectValue( qualifier, columnName, columnAlias ) ); + return this; + } + + public SelectValues addParameter(int jdbcTypeCode, int length) { + final String selectExpression = dialect.requiresCastingOfParametersInSelectClause() + ? dialect.cast( "?", jdbcTypeCode, length ) + : "?"; + selectValueList.add( new SelectValue( null, selectExpression, null ) ); + return this; + } + + public SelectValues addParameter(int jdbcTypeCode, int precision, int scale) { + final String selectExpression = dialect.requiresCastingOfParametersInSelectClause() + ? dialect.cast( "?", jdbcTypeCode, precision, scale ) + : "?"; + selectValueList.add( new SelectValue( null, selectExpression, null ) ); + return this; + } + + public String render() { + final StringBuilder buf = new StringBuilder( selectValueList.size() * 10 ); + final HashSet uniqueAliases = new HashSet(); + boolean firstExpression = true; + for ( SelectValue selectValue : selectValueList ) { + if ( selectValue.alias != null ) { + if ( ! uniqueAliases.add( selectValue.alias ) ) { + log.debug( "Skipping select-value with non-unique alias" ); + continue; + } + } + + if ( firstExpression ) { + firstExpression = false; + } + else { + buf.append( ", " ); + } + + if ( selectValue.qualifier != null ) { + buf.append( selectValue.qualifier ).append( '.' 
); + } + buf.append( selectValue.value ); + if ( selectValue.alias != null ) { + buf.append( " as " ).append( selectValue.alias ); + } + } + return buf.toString(); + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java b/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java index 69d114c477..2b74e76872 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java @@ -377,7 +377,7 @@ public class SchemaExport { } public void execute(Target output, Type type) { - if ( output == Target.NONE || type == SchemaExport.Type.NONE ) { + if ( (outputFile == null && output == Target.NONE) || type == SchemaExport.Type.NONE ) { return; } exceptions.clear(); diff --git a/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java b/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java index 57a70c9d9e..779b623e5f 100644 --- a/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java +++ b/hibernate-core/src/main/java/org/hibernate/tuple/component/PojoComponentTuplizer.java @@ -152,7 +152,7 @@ public class PojoComponentTuplizer extends AbstractComponentTuplizer { if ( component == BackrefPropertyAccessor.UNKNOWN ) { return new Object[propertySpan]; } - if ( optimizer != null && optimizer.getAccessOptimizer() != null ) { + else if ( optimizer != null && optimizer.getAccessOptimizer() != null ) { return optimizer.getAccessOptimizer().getPropertyValues( component ); } else { diff --git a/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java b/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java index 2dba605cfa..b6653d087a 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/AbstractStandardBasicType.java @@ -49,6 +49,7 @@ import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; * Convenience base class for {@link BasicType} implementations * * @author Steve Ebersole + * @author Brett Meyer */ public abstract class AbstractStandardBasicType implements BasicType, StringRepresentableType, ProcedureParameterExtractionAware { @@ -56,8 +57,10 @@ public abstract class AbstractStandardBasicType private static final Size DEFAULT_SIZE = new Size( 19, 2, 255, Size.LobMultiplier.NONE ); // to match legacy behavior private final Size dictatedSize = new Size(); - private final SqlTypeDescriptor sqlTypeDescriptor; - private final JavaTypeDescriptor javaTypeDescriptor; + // Don't use final here. Need to initialize after-the-fact + // by DynamicParameterizedTypes. 
+ private SqlTypeDescriptor sqlTypeDescriptor; + private JavaTypeDescriptor javaTypeDescriptor; public AbstractStandardBasicType(SqlTypeDescriptor sqlTypeDescriptor, JavaTypeDescriptor javaTypeDescriptor) { this.sqlTypeDescriptor = sqlTypeDescriptor; @@ -113,17 +116,24 @@ public abstract class AbstractStandardBasicType protected Size getDictatedSize() { return dictatedSize; } - - + // final implementations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ public final JavaTypeDescriptor getJavaTypeDescriptor() { return javaTypeDescriptor; } + + public final void setJavaTypeDescriptor( JavaTypeDescriptor javaTypeDescriptor ) { + this.javaTypeDescriptor = javaTypeDescriptor; + } public final SqlTypeDescriptor getSqlTypeDescriptor() { return sqlTypeDescriptor; } + + public final void setSqlTypeDescriptor( SqlTypeDescriptor sqlTypeDescriptor ) { + this.sqlTypeDescriptor = sqlTypeDescriptor; + } public final Class getReturnedClass() { return javaTypeDescriptor.getJavaTypeClass(); @@ -234,24 +244,7 @@ public abstract class AbstractStandardBasicType } public final T nullSafeGet(ResultSet rs, String name, final SessionImplementor session) throws SQLException { - // todo : have SessionImplementor extend WrapperOptions - final WrapperOptions options = new WrapperOptions() { - public boolean useStreamForLobBinding() { - return Environment.useStreamsForBinary(); - } - - public LobCreator getLobCreator() { - return Hibernate.getLobCreator( session ); - } - - public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) { - final SqlTypeDescriptor remapped = sqlTypeDescriptor.canBeRemapped() - ? session.getFactory().getDialect().remapSqlTypeDescriptor( sqlTypeDescriptor ) - : sqlTypeDescriptor; - return remapped == null ? sqlTypeDescriptor : remapped; - } - }; - + final WrapperOptions options = getOptions(session); return nullSafeGet( rs, name, options ); } @@ -269,24 +262,7 @@ public abstract class AbstractStandardBasicType Object value, int index, final SessionImplementor session) throws SQLException { - // todo : have SessionImplementor extend WrapperOptions - final WrapperOptions options = new WrapperOptions() { - public boolean useStreamForLobBinding() { - return Environment.useStreamsForBinary(); - } - - public LobCreator getLobCreator() { - return Hibernate.getLobCreator( session ); - } - - public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) { - final SqlTypeDescriptor remapped = sqlTypeDescriptor.canBeRemapped() - ? session.getFactory().getDialect().remapSqlTypeDescriptor( sqlTypeDescriptor ) - : sqlTypeDescriptor; - return remapped == null ? sqlTypeDescriptor : remapped; - } - }; - + final WrapperOptions options = getOptions(session); nullSafeSet( st, value, index, options ); } @@ -375,24 +351,7 @@ public abstract class AbstractStandardBasicType @Override public T extract(CallableStatement statement, int startIndex, final SessionImplementor session) throws SQLException { - // todo : have SessionImplementor extend WrapperOptions - final WrapperOptions options = new WrapperOptions() { - public boolean useStreamForLobBinding() { - return Environment.useStreamsForBinary(); - } - - public LobCreator getLobCreator() { - return Hibernate.getLobCreator( session ); - } - - public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) { - final SqlTypeDescriptor remapped = sqlTypeDescriptor.canBeRemapped() - ? 
session.getFactory().getDialect().remapSqlTypeDescriptor( sqlTypeDescriptor ) - : sqlTypeDescriptor; - return remapped == null ? sqlTypeDescriptor : remapped; - } - }; - + final WrapperOptions options = getOptions(session); return remapSqlTypeDescriptor( options ).getExtractor( javaTypeDescriptor ).extract( statement, startIndex, @@ -402,10 +361,16 @@ public abstract class AbstractStandardBasicType @Override public T extract(CallableStatement statement, String[] paramNames, final SessionImplementor session) throws SQLException { - // todo : have SessionImplementor extend WrapperOptions - final WrapperOptions options = new WrapperOptions() { + final WrapperOptions options = getOptions(session); + return remapSqlTypeDescriptor( options ).getExtractor( javaTypeDescriptor ).extract( statement, paramNames, options ); + } + + // TODO : have SessionImplementor extend WrapperOptions + private WrapperOptions getOptions(final SessionImplementor session) { + return new WrapperOptions() { public boolean useStreamForLobBinding() { - return Environment.useStreamsForBinary(); + return Environment.useStreamsForBinary() + || session.getFactory().getDialect().useInputStreamToInsertBlob(); } public LobCreator getLobCreator() { @@ -419,7 +384,5 @@ public abstract class AbstractStandardBasicType return remapped == null ? sqlTypeDescriptor : remapped; } }; - - return remapSqlTypeDescriptor( options ).getExtractor( javaTypeDescriptor ).extract( statement, paramNames, options ); } } diff --git a/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java b/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java index 4f6d8c9cc8..6cc3aff172 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java @@ -29,33 +29,38 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; - -import org.dom4j.Element; -import org.dom4j.Node; +import java.util.SortedMap; +import java.util.TreeMap; import org.hibernate.EntityMode; import org.hibernate.Hibernate; import org.hibernate.HibernateException; import org.hibernate.MappingException; import org.hibernate.collection.spi.PersistentCollection; +import org.hibernate.engine.spi.CollectionEntry; import org.hibernate.engine.spi.CollectionKey; import org.hibernate.engine.spi.EntityEntry; import org.hibernate.engine.spi.Mapping; import org.hibernate.engine.spi.PersistenceContext; import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.util.MarkerObject; import org.hibernate.internal.util.collections.ArrayHelper; +import org.hibernate.internal.util.collections.CollectionHelper; import org.hibernate.metamodel.spi.relational.Size; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.QueryableCollection; import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.Joinable; +import org.hibernate.pretty.MessageHelper; import org.hibernate.proxy.HibernateProxy; import org.hibernate.proxy.LazyInitializer; +import org.jboss.logging.Logger; /** * A type that handles Hibernate PersistentCollections (including arrays). 
@@ -64,6 +69,8 @@ import org.hibernate.proxy.LazyInitializer; */ public abstract class CollectionType extends AbstractType implements AssociationType { + private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, CollectionType.class.getName()); + private static final Object NOT_NULL_COLLECTION = new MarkerObject( "NOT NULL COLLECTION" ); public static final Object UNFETCHED_COLLECTION = new MarkerObject( "UNFETCHED COLLECTION" ); @@ -508,12 +515,92 @@ public abstract class CollectionType extends AbstractType implements Association if ( ! ( ( PersistentCollection ) original ).isDirty() ) { ( ( PersistentCollection ) result ).clearDirty(); } + + if ( elemType instanceof AssociationType ) { + preserveSnapshot( (PersistentCollection) original, + (PersistentCollection) result, + (AssociationType) elemType, owner, copyCache, + session ); + } } } return result; } + private void preserveSnapshot(PersistentCollection original, + PersistentCollection result, AssociationType elemType, + Object owner, Map copyCache, SessionImplementor session) { + Serializable originalSnapshot = original.getStoredSnapshot(); + Serializable resultSnapshot = result.getStoredSnapshot(); + Serializable targetSnapshot; + + if ( originalSnapshot instanceof List ) { + targetSnapshot = new ArrayList( + ( (List) originalSnapshot ).size() ); + for ( Object obj : (List) originalSnapshot ) { + ( (List) targetSnapshot ).add( elemType.replace( + obj, null, session, owner, copyCache ) ); + } + + } + else if ( originalSnapshot instanceof Map ) { + if ( originalSnapshot instanceof SortedMap ) { + targetSnapshot = new TreeMap( + ( (SortedMap) originalSnapshot ).comparator() ); + } + else { + targetSnapshot = new HashMap( + CollectionHelper.determineProperSizing( + ( (Map) originalSnapshot ).size() ), + CollectionHelper.LOAD_FACTOR ); + } + + for ( Map.Entry entry : ( + (Map) originalSnapshot ).entrySet() ) { + Object key = entry.getKey(); + Object value = entry.getValue(); + Object resultSnapshotValue = ( resultSnapshot == null ) ? null + : ( (Map) resultSnapshot ).get( key ); + + if ( key == value ) { + Object newValue = elemType.replace( value, + resultSnapshotValue, session, owner, copyCache ); + ( (Map) targetSnapshot ).put( newValue, newValue ); + + } + else { + Object newValue = elemType.replace( value, + resultSnapshotValue, session, owner, copyCache ); + ( (Map) targetSnapshot ).put( key, newValue ); + } + + } + + } + else if ( originalSnapshot instanceof Object[] ) { + Object[] arr = (Object[]) originalSnapshot; + for ( int i = 0; i < arr.length; i++ ) { + arr[i] = elemType.replace( + arr[i], null, session, owner, copyCache ); + } + targetSnapshot = originalSnapshot; + + } + else { + // retain the same snapshot + targetSnapshot = resultSnapshot; + + } + + CollectionEntry ce = session.getPersistenceContext().getCollectionEntry( + result ); + if ( ce != null ) { + ce.resetStoredSnapshot( result, targetSnapshot ); + } + + } + /** * Instantiate a new "underlying" collection exhibiting the same capacity * charactersitcs and the passed "original". 
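// A hedged illustration (not part of the patch) of the scenario the new
// preserveSnapshot(...) call above appears to address: after merging a
// detached owner, the copied collection's stored snapshot should refer to the
// managed element copies, which is why each snapshot entry is re-run through
// elemType.replace(...) and the CollectionEntry's stored snapshot is reset.
// Order is a placeholder entity and the helper class below is hypothetical.

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class MergeSnapshotSketch {
    public static Order mergeDetached(SessionFactory factory, Order detachedOrder) {
        Session session = factory.openSession();
        Transaction tx = session.beginTransaction();
        try {
            // merge() copies the detached state into managed instances; with the
            // change above, the collection's stored snapshot now tracks those
            // managed copies, so flush-time dirty checking compares like with like.
            Order managedOrder = (Order) session.merge( detachedOrder );
            tx.commit();
            return managedOrder;
        }
        catch (RuntimeException e) {
            tx.rollback();
            throw e;
        }
        finally {
            session.close();
        }
    }
}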
@@ -625,6 +712,7 @@ public abstract class CollectionType extends AbstractType implements Association if ( collection == null ) { // create a new collection wrapper, to be initialized later collection = instantiate( session, persister, key ); + collection.setOwner(owner); persistenceContext.addUninitializedCollection( persister, collection, key ); @@ -643,6 +731,12 @@ public abstract class CollectionType extends AbstractType implements Association } + if ( LOG.isTraceEnabled() ) { + LOG.tracef( "Created collection wrapper: %s", + MessageHelper.collectionInfoString( persister, collection, + key, session ) ); + } + } collection.setOwner(owner); diff --git a/hibernate-core/src/main/java/org/hibernate/type/ComponentType.java b/hibernate-core/src/main/java/org/hibernate/type/ComponentType.java index 213aa6c077..7691754665 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/ComponentType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/ComponentType.java @@ -425,7 +425,15 @@ public class ComponentType extends AbstractType implements CompositeType, Proced public Object[] getPropertyValues(Object component, EntityMode entityMode) throws HibernateException { - return componentTuplizer.getPropertyValues( component ); + if ( component instanceof Object[] ) { + // A few calls to hashCode pass the property values already in an + // Object[] (ex: QueryKey hash codes for cached queries). + // It's easiest to just check for the condition here prior to + // trying reflection. + return ( Object[] ) component; + } else { + return componentTuplizer.getPropertyValues( component ); + } } public void setPropertyValues(Object component, Object[] values, EntityMode entityMode) diff --git a/hibernate-core/src/main/java/org/hibernate/type/CustomType.java b/hibernate-core/src/main/java/org/hibernate/type/CustomType.java index b41a4b2181..1c06118480 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/CustomType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/CustomType.java @@ -52,7 +52,10 @@ import org.hibernate.usertype.UserVersionType; * @author Gavin King * @author Steve Ebersole */ -public class CustomType extends AbstractType implements IdentifierType, DiscriminatorType, VersionType, BasicType, StringRepresentableType { +public class CustomType + extends AbstractType + implements IdentifierType, DiscriminatorType, VersionType, BasicType, StringRepresentableType { + private final UserType userType; private final String name; private final int[] types; @@ -144,7 +147,7 @@ public class CustomType extends AbstractType implements IdentifierType, Discrimi SessionImplementor session, Object owner, Map copyCache) throws HibernateException { - return userType.replace(original, target, owner); + return userType.replace( original, target, owner ); } public void nullSafeSet(PreparedStatement st, Object value, int index, boolean[] settable, SessionImplementor session) diff --git a/hibernate-core/src/main/java/org/hibernate/type/EntityType.java b/hibernate-core/src/main/java/org/hibernate/type/EntityType.java index fdce2a7351..6593e71569 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/EntityType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/EntityType.java @@ -480,6 +480,9 @@ public abstract class EntityType extends AbstractType implements AssociationType throw new ClassCastException( value.getClass().getName() ); } id = ( Serializable ) value; + } else if ( value instanceof HibernateProxy ) { + HibernateProxy proxy = ( HibernateProxy ) value; + id = 
proxy.getHibernateLazyInitializer().getIdentifier(); } else { id = persister.getIdentifier( value ); diff --git a/hibernate-core/src/main/java/org/hibernate/type/EnumType.java b/hibernate-core/src/main/java/org/hibernate/type/EnumType.java index abbdfd81af..8fb18c3990 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/EnumType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/EnumType.java @@ -100,12 +100,42 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser @Override public Object nullSafeGet(ResultSet rs, String[] names, SessionImplementor session, Object owner) throws SQLException { if ( enumValueMapper == null ) { - guessTypeOfEnumValueMapper( rs.getMetaData().getColumnType( rs.findColumn( names[0] ) ) ); + resolveEnumValueMapper( rs, names[0] ); } return enumValueMapper.getValue( rs, names ); } - private void guessTypeOfEnumValueMapper(int columnType) { + private void resolveEnumValueMapper(ResultSet rs, String name) { + if ( enumValueMapper == null ) { + try { + resolveEnumValueMapper( rs.getMetaData().getColumnType( rs.findColumn( name ) ) ); + } + catch (Exception e) { + // because some drivers do not implement this + LOG.debugf( + "JDBC driver threw exception calling java.sql.ResultSetMetaData.getColumnType; " + + "using fallback determination [%s] : %s", + enumClass.getName(), + e.getMessage() + ); + // peek at the result value to guess type (this is legacy behavior) + try { + Object value = rs.getObject( name ); + if ( Number.class.isInstance( value ) ) { + treatAsOrdinal(); + } + else { + treatAsNamed(); + } + } + catch (SQLException ignore) { + treatAsOrdinal(); + } + } + } + } + + private void resolveEnumValueMapper(int columnType) { // fallback for cases where not enough parameter/parameterization information was passed in if ( isOrdinal( columnType ) ) { treatAsOrdinal(); @@ -118,11 +148,29 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser @Override public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session) throws HibernateException, SQLException { if ( enumValueMapper == null ) { - guessTypeOfEnumValueMapper( st.getParameterMetaData().getParameterType( index ) ); + resolveEnumValueMapper( st, index ); } enumValueMapper.setValue( st, (Enum) value, index ); } + private void resolveEnumValueMapper(PreparedStatement st, int index) { + if ( enumValueMapper == null ) { + try { + resolveEnumValueMapper( st.getParameterMetaData().getParameterType( index ) ); + } + catch (Exception e) { + // because some drivers do not implement this + LOG.debugf( + "JDBC driver threw exception calling java.sql.ParameterMetaData#getParameterType; " + + "falling back to ordinal-based enum mapping [%s] : %s", + enumClass.getName(), + e.getMessage() + ); + treatAsOrdinal(); + } + } + } + @Override public Object deepCopy(Object value) throws HibernateException { return value; @@ -153,8 +201,8 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser final ParameterType reader = (ParameterType) parameters.get( PARAMETER_TYPE ); // IMPL NOTE : be protective about not setting enumValueMapper (i.e. calling treatAsNamed/treatAsOrdinal) - // in cases where we do not have enough information. In such cases the `if` check in nullSafeGet/nullSafeSet - // will kick in to query against the JDBC metadata to make that determination. + // in cases where we do not have enough information. 
In such cases we do additional checks + // as part of nullSafeGet/nullSafeSet to query against the JDBC metadata to make the determination. if ( reader != null ) { enumClass = reader.getReturnedClass().asSubclass( Enum.class ); @@ -213,12 +261,14 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser private void treatAsOrdinal() { if ( enumValueMapper == null || ! OrdinalEnumValueMapper.class.isInstance( enumValueMapper ) ) { enumValueMapper = new OrdinalEnumValueMapper(); + sqlType = enumValueMapper.getSqlType(); } } private void treatAsNamed() { if ( enumValueMapper == null || ! NamedEnumValueMapper.class.isInstance( enumValueMapper ) ) { enumValueMapper = new NamedEnumValueMapper(); + sqlType = enumValueMapper.getSqlType(); } } @@ -250,29 +300,20 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser @Override public String objectToSQLString(Object value) { - if ( enumValueMapper == null ) { - guessTypeOfEnumValueMapper( sqlType ); - } return enumValueMapper.objectToSQLString( (Enum) value ); } @Override public String toString(Object value) throws HibernateException { - if ( enumValueMapper == null ) { - guessTypeOfEnumValueMapper( sqlType ); - } return enumValueMapper.toString( ( Enum) value ); } @Override public Object fromStringValue(String string) throws HibernateException { - if ( enumValueMapper == null ) { - guessTypeOfEnumValueMapper( sqlType ); - } return enumValueMapper.fromString( string ); } - private static interface EnumValueMapper { + private static interface EnumValueMapper extends Serializable { public int getSqlType(); public Enum getValue(ResultSet rs, String[] names) throws SQLException; public void setValue(PreparedStatement st, Enum value, int index) throws SQLException; @@ -291,20 +332,20 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser if ( jdbcValue == null ) { if ( LOG.isTraceEnabled() ) { - LOG.tracev( "Binding null to parameter: {0}", index ); + LOG.trace(String.format("Binding null to parameter: [%s]", index)); } st.setNull( index, getSqlType() ); return; } if ( LOG.isTraceEnabled() ) { - LOG.tracev( "Binding '{0}' to parameter: '{1}", jdbcValue, index ); + LOG.trace(String.format("Binding [%s] to parameter: [%s]", jdbcValue, index)); } st.setObject( index, jdbcValue, EnumType.this.sqlType ); } } - private class OrdinalEnumValueMapper extends EnumValueMapperSupport implements EnumValueMapper { + private class OrdinalEnumValueMapper extends EnumValueMapperSupport implements EnumValueMapper, Serializable { private transient Enum[] enumsByOrdinal; @Override @@ -317,14 +358,14 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser final int ordinal = rs.getInt( names[0] ); if ( rs.wasNull() ) { if ( LOG.isTraceEnabled() ) { - LOG.tracev( "Returning null as column {0}", names[0] ); + LOG.trace(String.format("Returning null as column [%s]", names[0])); } return null; } final Enum enumValue = fromOrdinal( ordinal ); if ( LOG.isTraceEnabled() ) { - LOG.tracev( "Returning '{0}' as column {1}", enumValue, names[0] ); + LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0])); } return enumValue; } @@ -375,7 +416,7 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser } } - private class NamedEnumValueMapper extends EnumValueMapperSupport implements EnumValueMapper { + private class NamedEnumValueMapper extends EnumValueMapperSupport implements EnumValueMapper, Serializable { @Override public int 
getSqlType() { return Types.VARCHAR; @@ -387,14 +428,14 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser if ( rs.wasNull() ) { if ( LOG.isTraceEnabled() ) { - LOG.tracev( "Returning null as column {0}", names[0] ); + LOG.trace(String.format("Returning null as column [%s]", names[0])); } return null; } final Enum enumValue = fromName( value ); if ( LOG.isTraceEnabled() ) { - LOG.tracev( "Returning '{0}' as column {1}", enumValue, names[0] ); + LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0])); } return enumValue; } @@ -435,6 +476,10 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType, Ser } } + public boolean isOrdinal() { + return isOrdinal( sqlType ); + } + private boolean isOrdinal(int paramType) { switch ( paramType ) { case Types.INTEGER: diff --git a/hibernate-core/src/main/java/org/hibernate/type/ManyToOneType.java b/hibernate-core/src/main/java/org/hibernate/type/ManyToOneType.java index 09a7448098..983eb083af 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/ManyToOneType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/ManyToOneType.java @@ -169,7 +169,7 @@ public class ManyToOneType extends EntityType { if ( uniqueKeyPropertyName == null && id != null ) { final EntityPersister persister = session.getFactory().getEntityPersister( getAssociatedEntityName() ); final EntityKey entityKey = session.generateEntityKey( id, persister ); - if ( !session.getPersistenceContext().containsEntity( entityKey ) ) { + if ( entityKey.isBatchLoadable() && !session.getPersistenceContext().containsEntity( entityKey ) ) { session.getPersistenceContext().getBatchFetchQueue().addBatchLoadableEntityKey( entityKey ); } } diff --git a/hibernate-core/src/main/java/org/hibernate/type/SerializableToBlobType.java b/hibernate-core/src/main/java/org/hibernate/type/SerializableToBlobType.java index ac548deaa7..702d14f07f 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/SerializableToBlobType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/SerializableToBlobType.java @@ -23,128 +23,57 @@ */ package org.hibernate.type; -import java.io.ByteArrayInputStream; import java.io.Serializable; -import java.sql.Blob; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Types; -import java.util.Map; import java.util.Properties; -import org.dom4j.Node; -import org.hibernate.Hibernate; -import org.hibernate.HibernateException; import org.hibernate.MappingException; -import org.hibernate.engine.spi.Mapping; -import org.hibernate.engine.spi.SessionFactoryImplementor; -import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.internal.util.ReflectHelper; -import org.hibernate.internal.util.SerializationHelper; +import org.hibernate.type.descriptor.java.SerializableTypeDescriptor; +import org.hibernate.type.descriptor.sql.BlobTypeDescriptor; import org.hibernate.usertype.DynamicParameterizedType; /** - * @author Emmanuel Bernard + * @author Brett Meyer */ -public class SerializableToBlobType extends AbstractLobType implements DynamicParameterizedType { - /** - * class name of the serialisable class - */ +public class SerializableToBlobType extends AbstractSingleColumnStandardBasicType implements DynamicParameterizedType { + public static final String CLASS_NAME = "classname"; - private Class serializableClass; - private SerializableType type; + + private static final long serialVersionUID = 1L; - public int[] sqlTypes(Mapping 
mapping) throws MappingException { - return new int[]{Types.BLOB}; + /** + * @param sqlTypeDescriptor + * @param javaTypeDescriptor + */ + public SerializableToBlobType() { + super( BlobTypeDescriptor.DEFAULT, new SerializableTypeDescriptor( Serializable.class ) ); } - public Class getReturnedClass() { - return serializableClass; - } - - @Override - public boolean isEqual(Object x, Object y, SessionFactoryImplementor factory) { - return type.isEqual( x, y ); - } - - - @Override - public int getHashCode(Object x, SessionFactoryImplementor session) { - return type.getHashCode( x ); - } - - public Object get(ResultSet rs, String name) throws SQLException { - Blob blob = rs.getBlob( name ); - if ( rs.wasNull() ) return null; - int length = (int) blob.length(); - byte[] primaryResult = blob.getBytes( 1, length ); - return fromBytes( primaryResult ); - } - - private static byte[] toBytes(Object object) throws SerializationException { - return SerializationHelper.serialize( (Serializable) object ); - } - - private Object fromBytes(byte[] bytes) throws SerializationException { - return SerializationHelper.deserialize( bytes, getReturnedClass().getClassLoader() ); - } - - public void set(PreparedStatement st, Object value, int index, SessionImplementor session) throws SQLException { - if ( value != null ) { - byte[] toSet; - toSet = toBytes( value ); - if ( session.getFactory().getDialect().useInputStreamToInsertBlob() ) { - st.setBinaryStream( index, new ByteArrayInputStream( toSet ), toSet.length ); - } - else { - st.setBlob( index, Hibernate.getLobCreator( session ).createBlob( toSet ) ); - } - } - else { - st.setNull( index, sqlTypes( null )[0] ); - } - } - - public String toLoggableString(Object value, SessionFactoryImplementor factory) throws HibernateException { - return type.toLoggableString( value, factory ); - } - - public Object deepCopy(Object value, SessionFactoryImplementor factory) - throws HibernateException { - return type.deepCopy( value, null ); - } - - public boolean isMutable() { - return type.isMutable(); - } - - public Object replace(Object original, Object target, SessionImplementor session, Object owner, Map copyCache) - throws HibernateException { - return type.replace( original, target, session, owner, copyCache ); - } - - public boolean[] toColumnNullness(Object value, Mapping mapping) { - return type.toColumnNullness( value, mapping ); + /** + * {@inheritDoc} + */ + public String getName() { + return getClass().getName(); } + /** + * {@inheritDoc} + */ + @SuppressWarnings("unchecked") public void setParameterValues(Properties parameters) { ParameterType reader = (ParameterType) parameters.get( PARAMETER_TYPE ); if ( reader != null ) { - serializableClass = reader.getReturnedClass(); - } - else { + setJavaTypeDescriptor( new SerializableTypeDescriptor( reader.getReturnedClass() ) ); + } else { String className = parameters.getProperty( CLASS_NAME ); if ( className == null ) { throw new MappingException( "No class name defined for type: " + SerializableToBlobType.class.getName() ); } try { - serializableClass = ReflectHelper.classForName( className ); - } - catch ( ClassNotFoundException e ) { + setJavaTypeDescriptor( new SerializableTypeDescriptor( ReflectHelper.classForName( className ) ) ); + } catch ( ClassNotFoundException e ) { throw new MappingException( "Unable to load class from " + CLASS_NAME + " parameter", e ); } } - type = new SerializableType( serializableClass ); } } diff --git 
a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BlobTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BlobTypeDescriptor.java index d7b104657f..8d48932b96 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BlobTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/BlobTypeDescriptor.java @@ -23,15 +23,19 @@ */ package org.hibernate.type.descriptor.java; +import java.io.IOException; +import java.io.InputStream; import java.io.Serializable; import java.sql.Blob; import java.sql.SQLException; import java.util.Comparator; import org.hibernate.HibernateException; +import org.hibernate.engine.jdbc.BlobImplementer; import org.hibernate.engine.jdbc.BlobProxy; import org.hibernate.engine.jdbc.WrappedBlob; -import org.hibernate.type.descriptor.BinaryStream; +import org.hibernate.engine.jdbc.BinaryStream; +import org.hibernate.engine.jdbc.internal.BinaryStreamImpl; import org.hibernate.type.descriptor.WrapperOptions; /** @@ -41,6 +45,7 @@ import org.hibernate.type.descriptor.WrapperOptions; * treat them as immutable because we cannot properly check them for changes nor deep copy them. * * @author Steve Ebersole + * @author Brett Meyer */ public class BlobTypeDescriptor extends AbstractTypeDescriptor { public static final BlobTypeDescriptor INSTANCE = new BlobTypeDescriptor(); @@ -108,27 +113,43 @@ public class BlobTypeDescriptor extends AbstractTypeDescriptor { @SuppressWarnings({ "unchecked" }) public X unwrap(Blob value, Class type, WrapperOptions options) { - if ( ! ( Blob.class.isAssignableFrom( type ) || BinaryStream.class.isAssignableFrom( type ) ) ) { - throw unknownUnwrap( type ); - } - if ( value == null ) { return null; } - if ( BinaryStream.class.isAssignableFrom( type ) ) { - try { - return (X) new BinaryStreamImpl( DataHelper.extractBytes( value.getBinaryStream() ) ); + try { + if ( BinaryStream.class.isAssignableFrom( type ) ) { + if ( BlobImplementer.class.isInstance( value ) ) { + // if the incoming Blob is a wrapper, just pass along its BinaryStream + return (X) ( (BlobImplementer) value ).getUnderlyingStream(); + } + else { + // otherwise we need to build a BinaryStream... + return (X) new BinaryStreamImpl( DataHelper.extractBytes( value.getBinaryStream() ) ); + } } - catch ( SQLException e ) { - throw new HibernateException( "Unable to access blob stream", e ); + else if ( byte[].class.isAssignableFrom( type )) { + if ( BlobImplementer.class.isInstance( value ) ) { + // if the incoming Blob is a wrapper, just grab the bytes from its BinaryStream + return (X) ( (BlobImplementer) value ).getUnderlyingStream().getBytes(); + } + else { + // otherwise extract the bytes from the stream manually + return (X) DataHelper.extractBytes( value.getBinaryStream() ); + } + } + else if (Blob.class.isAssignableFrom( type )) { + final Blob blob = WrappedBlob.class.isInstance( value ) + ? ( (WrappedBlob) value ).getWrappedBlob() + : value; + return (X) blob; } } - - final Blob blob = WrappedBlob.class.isInstance( value ) - ? ( (WrappedBlob) value ).getWrappedBlob() - : value; - return (X) blob; + catch ( SQLException e ) { + throw new HibernateException( "Unable to access blob stream", e ); + } + + throw unknownUnwrap( type ); } public Blob wrap(X value, WrapperOptions options) { @@ -136,10 +157,24 @@ public class BlobTypeDescriptor extends AbstractTypeDescriptor { return null; } - if ( ! 
Blob.class.isAssignableFrom( value.getClass() ) ) { - throw unknownWrap( value.getClass() ); + // Support multiple return types from + // org.hibernate.type.descriptor.sql.BlobTypeDescriptor + if ( Blob.class.isAssignableFrom( value.getClass() ) ) { + return options.getLobCreator().wrap( (Blob) value ); + } + else if ( byte[].class.isAssignableFrom( value.getClass() ) ) { + return options.getLobCreator().createBlob( ( byte[] ) value); + } + else if ( InputStream.class.isAssignableFrom( value.getClass() ) ) { + InputStream inputStream = ( InputStream ) value; + try { + return options.getLobCreator().createBlob( inputStream, inputStream.available() ); + } + catch ( IOException e ) { + throw unknownWrap( value.getClass() ); + } } - return options.getLobCreator().wrap( (Blob) value ); + throw unknownWrap( value.getClass() ); } } diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ByteArrayTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ByteArrayTypeDescriptor.java index 450d7ddffe..9c15fdf719 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ByteArrayTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ByteArrayTypeDescriptor.java @@ -29,7 +29,8 @@ import java.sql.Blob; import java.sql.SQLException; import org.hibernate.HibernateException; -import org.hibernate.type.descriptor.BinaryStream; +import org.hibernate.engine.jdbc.BinaryStream; +import org.hibernate.engine.jdbc.internal.BinaryStreamImpl; import org.hibernate.type.descriptor.WrapperOptions; /** diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterArrayTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterArrayTypeDescriptor.java index 8d77e0cc04..59ee39666f 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterArrayTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterArrayTypeDescriptor.java @@ -28,7 +28,8 @@ import java.io.StringReader; import java.sql.Clob; import java.util.Arrays; -import org.hibernate.type.descriptor.CharacterStream; +import org.hibernate.engine.jdbc.CharacterStream; +import org.hibernate.engine.jdbc.internal.CharacterStreamImpl; import org.hibernate.type.descriptor.WrapperOptions; /** diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ClobTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ClobTypeDescriptor.java index e417d2c0fc..97a46c4e91 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ClobTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ClobTypeDescriptor.java @@ -27,9 +27,11 @@ import java.io.Serializable; import java.sql.Clob; import java.util.Comparator; +import org.hibernate.engine.jdbc.ClobImplementer; import org.hibernate.engine.jdbc.ClobProxy; import org.hibernate.engine.jdbc.WrappedClob; -import org.hibernate.type.descriptor.CharacterStream; +import org.hibernate.engine.jdbc.CharacterStream; +import org.hibernate.engine.jdbc.internal.CharacterStreamImpl; import org.hibernate.type.descriptor.WrapperOptions; /** @@ -102,7 +104,14 @@ public class ClobTypeDescriptor extends AbstractTypeDescriptor { } if ( CharacterStream.class.isAssignableFrom( type ) ) { - return (X) new CharacterStreamImpl( DataHelper.extractString( value ) ); + if ( ClobImplementer.class.isInstance( value ) ) { + // if the incoming 
Clob is a wrapper, just pass along its CharacterStream + return (X) ( (ClobImplementer) value ).getUnderlyingStream(); + } + else { + // otherwise we need to build one... + return (X) new CharacterStreamImpl( DataHelper.extractString( value ) ); + } } final Clob clob = WrappedClob.class.isInstance( value ) diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/DataHelper.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/DataHelper.java index 9a2fd71989..8a17e8a185 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/DataHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/DataHelper.java @@ -34,8 +34,9 @@ import java.sql.SQLException; import org.jboss.logging.Logger; import org.hibernate.HibernateException; +import org.hibernate.engine.jdbc.internal.BinaryStreamImpl; import org.hibernate.internal.CoreMessageLogger; -import org.hibernate.type.descriptor.BinaryStream; +import org.hibernate.engine.jdbc.BinaryStream; /** * A help for dealing with BLOB and CLOB data diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveByteArrayTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveByteArrayTypeDescriptor.java index 95df0e3562..6cf3337ef3 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveByteArrayTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveByteArrayTypeDescriptor.java @@ -30,7 +30,8 @@ import java.sql.SQLException; import java.util.Arrays; import org.hibernate.HibernateException; -import org.hibernate.type.descriptor.BinaryStream; +import org.hibernate.engine.jdbc.BinaryStream; +import org.hibernate.engine.jdbc.internal.BinaryStreamImpl; import org.hibernate.type.descriptor.WrapperOptions; /** diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveCharacterArrayTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveCharacterArrayTypeDescriptor.java index 33bbbc8b5f..1d2f219b3e 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveCharacterArrayTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/PrimitiveCharacterArrayTypeDescriptor.java @@ -28,7 +28,8 @@ import java.io.StringReader; import java.sql.Clob; import java.util.Arrays; -import org.hibernate.type.descriptor.CharacterStream; +import org.hibernate.engine.jdbc.CharacterStream; +import org.hibernate.engine.jdbc.internal.CharacterStreamImpl; import org.hibernate.type.descriptor.WrapperOptions; /** diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/SerializableTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/SerializableTypeDescriptor.java index 47a6a40e88..493a5247e5 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/SerializableTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/SerializableTypeDescriptor.java @@ -26,15 +26,20 @@ package org.hibernate.type.descriptor.java; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.Serializable; +import java.sql.Blob; +import java.sql.SQLException; +import org.hibernate.HibernateException; +import org.hibernate.engine.jdbc.internal.BinaryStreamImpl; import org.hibernate.internal.util.SerializationHelper; -import 
org.hibernate.type.descriptor.BinaryStream; +import org.hibernate.engine.jdbc.BinaryStream; import org.hibernate.type.descriptor.WrapperOptions; /** * Descriptor for general {@link Serializable} handling. * * @author Steve Ebersole + * @author Brett meyer */ public class SerializableTypeDescriptor extends AbstractTypeDescriptor { @@ -96,28 +101,32 @@ public class SerializableTypeDescriptor extends Abstract public X unwrap(T value, Class type, WrapperOptions options) { if ( value == null ) { return null; - } - if ( byte[].class.isAssignableFrom( type ) ) { + } else if ( byte[].class.isAssignableFrom( type ) ) { return (X) toBytes( value ); - } - if ( InputStream.class.isAssignableFrom( type ) ) { + } else if ( InputStream.class.isAssignableFrom( type ) ) { return (X) new ByteArrayInputStream( toBytes( value ) ); - } - if ( BinaryStream.class.isAssignableFrom( type ) ) { + } else if ( BinaryStream.class.isAssignableFrom( type ) ) { return (X) new BinaryStreamImpl( toBytes( value ) ); + } else if ( Blob.class.isAssignableFrom( type )) { + return (X) options.getLobCreator().createBlob( toBytes(value) ); } + throw unknownUnwrap( type ); } public T wrap(X value, WrapperOptions options) { if ( value == null ) { return null; - } - if ( byte[].class.isInstance( value ) ) { + } else if ( byte[].class.isInstance( value ) ) { return fromBytes( (byte[]) value ); - } - if ( InputStream.class.isInstance( value ) ) { + } else if ( InputStream.class.isInstance( value ) ) { return fromBytes( DataHelper.extractBytes( (InputStream) value ) ); + } else if ( Blob.class.isInstance( value )) { + try { + return fromBytes( DataHelper.extractBytes( ( (Blob) value ).getBinaryStream() ) ); + } catch ( SQLException e ) { + throw new HibernateException(e); + } } throw unknownWrap( value.getClass() ); } diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/StringTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/StringTypeDescriptor.java index eb8f206d51..0e1aa370c4 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/StringTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/StringTypeDescriptor.java @@ -27,7 +27,8 @@ import java.io.Reader; import java.io.StringReader; import java.sql.Clob; -import org.hibernate.type.descriptor.CharacterStream; +import org.hibernate.engine.jdbc.CharacterStream; +import org.hibernate.engine.jdbc.internal.CharacterStreamImpl; import org.hibernate.type.descriptor.WrapperOptions; /** diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/BlobTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/BlobTypeDescriptor.java index 5a60c0a9a7..23b38ad56f 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/BlobTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/BlobTypeDescriptor.java @@ -30,8 +30,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; -import org.hibernate.type.descriptor.BinaryStream; -import org.hibernate.type.descriptor.ValueExtractor; +import org.hibernate.engine.jdbc.BinaryStream; import org.hibernate.type.descriptor.WrapperOptions; import org.hibernate.type.descriptor.java.JavaTypeDescriptor; @@ -40,6 +39,7 @@ import org.hibernate.type.descriptor.java.JavaTypeDescriptor; * * @author Steve Ebersole * @author Gail Badner + * @author Brett Meyer */ public abstract class BlobTypeDescriptor implements SqlTypeDescriptor { @@ 
-56,24 +56,11 @@ public abstract class BlobTypeDescriptor implements SqlTypeDescriptor { return true; } + protected abstract BasicExtractor getBlobExtractor(final JavaTypeDescriptor javaTypeDescriptor); + @Override - public ValueExtractor getExtractor(final JavaTypeDescriptor javaTypeDescriptor) { - return new BasicExtractor( javaTypeDescriptor, this ) { - @Override - protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException { - return javaTypeDescriptor.wrap( rs.getBlob( name ), options ); - } - - @Override - protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException { - return javaTypeDescriptor.wrap( statement.getBlob( index ), options ); - } - - @Override - protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException { - return javaTypeDescriptor.wrap( statement.getBlob( name ), options ); - } - }; + public BasicExtractor getExtractor(final JavaTypeDescriptor javaTypeDescriptor) { + return getBlobExtractor( javaTypeDescriptor ); } protected abstract BasicBinder getBlobBinder(final JavaTypeDescriptor javaTypeDescriptor); @@ -93,16 +80,39 @@ public abstract class BlobTypeDescriptor implements SqlTypeDescriptor { return new BasicBinder( javaTypeDescriptor, this ) { @Override protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) throws SQLException { + BlobTypeDescriptor descriptor = BLOB_BINDING; if ( options.useStreamForLobBinding() ) { - STREAM_BINDING.getBlobBinder( javaTypeDescriptor ).doBind( st, value, index, options ); + descriptor = STREAM_BINDING; } else if ( byte[].class.isInstance( value ) ) { // performance shortcut for binding BLOB data in byte[] format - PRIMITIVE_ARRAY_BINDING.getBlobBinder( javaTypeDescriptor ).doBind( st, value, index, options ); - } - else { - BLOB_BINDING.getBlobBinder( javaTypeDescriptor ).doBind( st, value, index, options ); + descriptor = PRIMITIVE_ARRAY_BINDING; } + descriptor.getBlobBinder( javaTypeDescriptor ).doBind( st, value, index, options ); + } + }; + } + + @Override + public BasicExtractor getBlobExtractor(final JavaTypeDescriptor javaTypeDescriptor) { + return new BasicExtractor( javaTypeDescriptor, this ) { + // For now, default to using getBlob. If extraction + // should also check useStreamForLobBinding, add + // checks here and use STREAM_BINDING. 
+ + @Override + protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException { + return BLOB_BINDING.getExtractor( javaTypeDescriptor ).doExtract( rs, name, options ); + } + + @Override + protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException { + return BLOB_BINDING.getExtractor( javaTypeDescriptor ).doExtract( statement, index, options ); + } + + @Override + protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException { + return BLOB_BINDING.getExtractor( javaTypeDescriptor ).doExtract( statement, name, options ); } }; } @@ -120,6 +130,26 @@ public abstract class BlobTypeDescriptor implements SqlTypeDescriptor { } }; } + + @Override + public BasicExtractor getBlobExtractor(final JavaTypeDescriptor javaTypeDescriptor) { + return new BasicExtractor( javaTypeDescriptor, this ) { + @Override + protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException { + return javaTypeDescriptor.wrap( rs.getBytes( name ), options ); + } + + @Override + protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException { + return javaTypeDescriptor.wrap( statement.getBytes( index ), options ); + } + + @Override + protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException { + return javaTypeDescriptor.wrap( statement.getBytes( name ), options ); + } + }; + } }; public static final BlobTypeDescriptor BLOB_BINDING = @@ -134,6 +164,26 @@ public abstract class BlobTypeDescriptor implements SqlTypeDescriptor { } }; } + + @Override + public BasicExtractor getBlobExtractor(final JavaTypeDescriptor javaTypeDescriptor) { + return new BasicExtractor( javaTypeDescriptor, this ) { + @Override + protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException { + return javaTypeDescriptor.wrap( rs.getBlob( name ), options ); + } + + @Override + protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException { + return javaTypeDescriptor.wrap( statement.getBlob( index ), options ); + } + + @Override + protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException { + return javaTypeDescriptor.wrap( statement.getBlob( name ), options ); + } + }; + } }; public static final BlobTypeDescriptor STREAM_BINDING = @@ -149,6 +199,28 @@ public abstract class BlobTypeDescriptor implements SqlTypeDescriptor { } }; } + + @Override + public BasicExtractor getBlobExtractor(final JavaTypeDescriptor javaTypeDescriptor) { + return new BasicExtractor( javaTypeDescriptor, this ) { + @Override + protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException { + return javaTypeDescriptor.wrap( rs.getBinaryStream( name ), options ); + } + + @Override + protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException { + // TODO: CallableStatement does not have getBinaryStream + return javaTypeDescriptor.wrap( statement.getBytes( index ), options ); + } + + @Override + protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException { + // TODO: CallableStatement does not have getBinaryStream + return javaTypeDescriptor.wrap( statement.getBytes( name ), options ); + } + }; + } }; } diff --git 
a/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/ClobTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/ClobTypeDescriptor.java index ac87c7038b..816b6acc60 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/ClobTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/ClobTypeDescriptor.java @@ -30,7 +30,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; -import org.hibernate.type.descriptor.CharacterStream; +import org.hibernate.engine.jdbc.CharacterStream; import org.hibernate.type.descriptor.ValueBinder; import org.hibernate.type.descriptor.ValueExtractor; import org.hibernate.type.descriptor.WrapperOptions; @@ -122,7 +122,7 @@ public abstract class ClobTypeDescriptor implements SqlTypeDescriptor { protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) throws SQLException { final CharacterStream characterStream = javaTypeDescriptor.unwrap( value, CharacterStream.class, options ); - st.setCharacterStream( index, characterStream.getReader(), characterStream.getLength() ); + st.setCharacterStream( index, characterStream.asReader(), characterStream.getLength() ); } }; } diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/NClobTypeDescriptor.java b/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/NClobTypeDescriptor.java index 6295c7d106..588e0fc80b 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/NClobTypeDescriptor.java +++ b/hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/NClobTypeDescriptor.java @@ -30,7 +30,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; -import org.hibernate.type.descriptor.CharacterStream; +import org.hibernate.engine.jdbc.CharacterStream; import org.hibernate.type.descriptor.ValueBinder; import org.hibernate.type.descriptor.ValueExtractor; import org.hibernate.type.descriptor.WrapperOptions; @@ -122,7 +122,7 @@ public abstract class NClobTypeDescriptor implements SqlTypeDescriptor { protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) throws SQLException { final CharacterStream characterStream = javaTypeDescriptor.unwrap( value, CharacterStream.class, options ); - st.setCharacterStream( index, characterStream.getReader(), characterStream.getLength() ); + st.setCharacterStream( index, characterStream.asReader(), characterStream.getLength() ); } }; } diff --git a/hibernate-core/src/main/java/org/hibernate/usertype/DynamicParameterizedType.java b/hibernate-core/src/main/java/org/hibernate/usertype/DynamicParameterizedType.java index 07043eda19..00520b6d84 100644 --- a/hibernate-core/src/main/java/org/hibernate/usertype/DynamicParameterizedType.java +++ b/hibernate-core/src/main/java/org/hibernate/usertype/DynamicParameterizedType.java @@ -45,6 +45,7 @@ public interface DynamicParameterizedType extends ParameterizedType { public static final String ENTITY = "org.hibernate.type.ParameterType.entityClass"; public static final String PROPERTY = "org.hibernate.type.ParameterType.propertyName"; public static final String ACCESS_TYPE = "org.hibernate.type.ParameterType.accessType"; + public static final String XPROPERTY = "org.hibernate.type.ParameterType.xproperty"; public static interface ParameterType { diff --git a/hibernate-core/src/test/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolverTest.java 
b/hibernate-core/src/test/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolverTest.java new file mode 100644 index 0000000000..4bc0abb606 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/engine/jdbc/dialect/internal/StandardDialectResolverTest.java @@ -0,0 +1,155 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Middleware LLC. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + * + */ +package org.hibernate.engine.jdbc.dialect.internal; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.sql.DatabaseMetaData; +import java.sql.SQLException; + +import org.hibernate.dialect.Dialect; +import org.hibernate.dialect.PostgreSQL81Dialect; +import org.hibernate.dialect.PostgreSQL82Dialect; +import org.hibernate.dialect.SQLServer2005Dialect; +import org.hibernate.dialect.SQLServer2008Dialect; +import org.hibernate.dialect.SQLServerDialect; +import org.hibernate.testing.junit4.BaseUnitTestCase; +import org.junit.Test; + +/** + * Unit test of the {@link StandardDialectResolver} class. 
+ * + * @author Bryan Turner + */ +public class StandardDialectResolverTest extends BaseUnitTestCase { + + @Test + public void testResolveDialectInternalForSQLServer2000() + throws SQLException { + runSQLServerDialectTest( 8, SQLServerDialect.class ); + } + + @Test + public void testResolveDialectInternalForSQLServer2005() + throws SQLException { + runSQLServerDialectTest( 9, SQLServer2005Dialect.class ); + } + + @Test + public void testResolveDialectInternalForSQLServer2008() + throws SQLException { + runSQLServerDialectTest( 10, SQLServer2008Dialect.class ); + } + + @Test + public void testResolveDialectInternalForSQLServer2012() + throws SQLException { + runSQLServerDialectTest( 11, SQLServer2008Dialect.class ); + } + + @Test + public void testResolveDialectInternalForUnknownSQLServerVersion() + throws SQLException { + runSQLServerDialectTest( 7, SQLServerDialect.class ); + } + + @Test + public void testResolveDialectInternalForPostgres81() + throws SQLException { + runPostgresDialectTest( 8, 1, PostgreSQL81Dialect.class ); + } + + @Test + public void testResolveDialectInternalForPostgres82() + throws SQLException { + runPostgresDialectTest( 8, 2, PostgreSQL82Dialect.class ); + } + + @Test + public void testResolveDialectInternalForPostgres83() throws SQLException { + runPostgresDialectTest( 8, 3, PostgreSQL82Dialect.class ); + } + + @Test + public void testResolveDialectInternalForPostgres84() throws SQLException { + runPostgresDialectTest( 8, 4, PostgreSQL82Dialect.class ); + } + + @Test + public void testResolveDialectInternalForPostgres9() throws SQLException { + runPostgresDialectTest( 9, 0, PostgreSQL82Dialect.class ); + } + + @Test + public void testResolveDialectInternalForPostgres91() throws SQLException { + runPostgresDialectTest( 9, 1, PostgreSQL82Dialect.class ); + } + + @Test + public void testResolveDialectInternalForPostgres92() throws SQLException { + runPostgresDialectTest( 9, 2, PostgreSQL82Dialect.class ); + } + + private static void runSQLServerDialectTest( + int version, Class expectedDialect) + throws SQLException { + runDialectTest( "Microsoft SQL Server", version, 0, + expectedDialect ); + } + + private static void runPostgresDialectTest( + int majorVersion, int minorVersion, + Class expectedDialect) throws SQLException { + runDialectTest( "PostgreSQL", majorVersion, minorVersion, + expectedDialect ); + } + + private static void runDialectTest( + String productName, int majorVersion, int minorVersion, + Class expectedDialect) throws SQLException { + DatabaseMetaData metaData = mock( DatabaseMetaData.class ); + when( metaData.getDatabaseProductName() ).thenReturn( productName ); + when( metaData.getDatabaseMajorVersion() ).thenReturn( majorVersion ); + when( metaData.getDatabaseMinorVersion() ).thenReturn( minorVersion ); + + Dialect dialect = new StandardDialectResolver().resolveDialectInternal( + metaData ); + + StringBuilder builder = new StringBuilder( productName ).append( " " ) + .append( majorVersion ); + if ( minorVersion > 0 ) { + builder.append( "." 
).append( minorVersion ); + } + String dbms = builder.toString(); + + assertNotNull( "Dialect for " + dbms + " should not be null", dialect ); + assertTrue( "Dialect for " + dbms + " should be " + + expectedDialect.getSimpleName(), + expectedDialect.isInstance( dialect ) ); + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/CharProperty.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/CharProperty.java index fd579720d6..1029bf46a9 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/CharProperty.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/CharProperty.java @@ -3,6 +3,7 @@ import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Table; +import javax.persistence.Column; @Entity @Table( name = "char_property" ) @@ -11,6 +12,7 @@ public class CharProperty implements Property { private String name; + @Column(name = "`value`") private Character value; public CharProperty() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/IntegerProperty.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/IntegerProperty.java index 0069fa32f3..a05794743a 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/IntegerProperty.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/IntegerProperty.java @@ -3,12 +3,14 @@ import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Table; +import javax.persistence.Column; @Entity @Table(name="int_property") public class IntegerProperty implements Property { private Integer id; private String name; + @Column(name = "`value`") private Integer value; public IntegerProperty() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/LongProperty.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/LongProperty.java index 9502b8aba9..76001abdde 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/LongProperty.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/LongProperty.java @@ -3,6 +3,7 @@ import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Table; +import javax.persistence.Column; @Entity @Table(name = "long_property") @@ -10,7 +11,7 @@ public class LongProperty implements Property { private Integer id; private String name; - + @Column(name = "`value`") private Long value; public LongProperty() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/StringProperty.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/StringProperty.java index 41980e8fb4..ca9dd0b43a 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/any/StringProperty.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/any/StringProperty.java @@ -3,12 +3,14 @@ import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Table; +import javax.persistence.Column; @Entity @Table(name="string_property") public class StringProperty implements Property { private Integer id; private String name; + @Column(name = "`value`") private String value; public StringProperty() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/MinMax.java 
b/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/MinMax.java index d14c1b3c5f..0170577690 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/MinMax.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/MinMax.java @@ -26,6 +26,7 @@ package org.hibernate.test.annotations.beanvalidation; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; +import javax.persistence.Column; import javax.validation.constraints.Max; import javax.validation.constraints.Min; @@ -41,6 +42,7 @@ public class MinMax { @Max(10) @Min(2) + @Column(name = "`value`") private Integer value; private MinMax() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/Tv.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/Tv.java index 9091eef1b6..e8fca9cd7f 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/Tv.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/beanvalidation/Tv.java @@ -1,87 +1,88 @@ -/* - * Hibernate, Relational Persistence for Idiomatic Java - * - * Copyright (c) 2010 by Red Hat Inc and/or its affiliates or by - * third-party contributors as indicated by either @author tags or express - * copyright attribution statements applied by the authors. All - * third-party contributions are distributed under license by Red Hat Inc. - * - * This copyrighted material is made available to anyone wishing to use, modify, - * copy, or redistribute it subject to the terms and conditions of the GNU - * Lesser General Public License, as published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License - * for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this distribution; if not, write to: - * Free Software Foundation, Inc. 
- * 51 Franklin Street, Fifth Floor - * Boston, MA 02110-1301 USA - */ - -package org.hibernate.test.annotations.beanvalidation; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.Date; -import javax.persistence.Embeddable; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.validation.Valid; -import javax.validation.constraints.Future; -import javax.validation.constraints.Min; -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Size; - -import org.hibernate.validator.constraints.Length; - -/** - * @author Emmanuel Bernard - * @author Hardy Ferentschik - */ -@Entity -public class Tv { - - @Id - @Size(max = 2) - public String serial; - - @Length(max = 5) - public String model; - - public int size; - - @Size(max = 2) - public String name; - - @Future - public Date expDate; - - @Size(min = 0) - public String description; - - @Min(1000) - public BigInteger lifetime; - - @NotNull - @Valid - public Tuner tuner; - - @Valid - public Recorder recorder; - - @Embeddable - public static class Tuner { - @NotNull - public String frequency; - } - - @Embeddable - public static class Recorder { - @NotNull - public BigDecimal time; - } +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2010 by Red Hat Inc and/or its affiliates or by + * third-party contributors as indicated by either @author tags or express + * copyright attribution statements applied by the authors. All + * third-party contributions are distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ + +package org.hibernate.test.annotations.beanvalidation; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Date; +import javax.persistence.Embeddable; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Column; +import javax.validation.Valid; +import javax.validation.constraints.Future; +import javax.validation.constraints.Min; +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; + +import org.hibernate.validator.constraints.Length; + +/** + * @author Emmanuel Bernard + * @author Hardy Ferentschik + */ +@Entity +public class Tv { + + @Id + @Size(max = 2) + public String serial; + + @Length(max = 5) + public String model; + + public int size; + + @Size(max = 2) + public String name; + + @Future + public Date expDate; + + @Size(min = 0) + public String description; + + @Min(1000) + public BigInteger lifetime; + + @NotNull + @Valid + public Tuner tuner; + + @Valid + public Recorder recorder; + + @Embeddable + public static class Tuner { + @NotNull + public String frequency; + } + + @Embeddable + public static class Recorder { + @NotNull + @Column(name = "`time`") + public BigDecimal time; + } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/cascade/Tooth.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/cascade/Tooth.java index c3faa52ec6..e9e445188c 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/cascade/Tooth.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/cascade/Tooth.java @@ -5,6 +5,7 @@ import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.ManyToOne; +import javax.persistence.Column; /** * @author Emmanuel Bernard @@ -14,6 +15,7 @@ public class Tooth { @Id @GeneratedValue public Integer id; + @Column(name = "`type`") public String type; @ManyToOne(cascade = CascadeType.PERSIST) public Tooth leftNeighbour; diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvMagazin.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvMagazin.java index ed961d8e10..03155a49c0 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvMagazin.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvMagazin.java @@ -1,24 +1,26 @@ -//$Id$ -package org.hibernate.test.annotations.cid; -import java.util.Date; -import javax.persistence.AssociationOverride; -import javax.persistence.AssociationOverrides; -import javax.persistence.EmbeddedId; -import javax.persistence.Entity; -import javax.persistence.JoinColumn; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; - -/** - * @author Emmanuel Bernard - */ -@Entity -@AssociationOverrides({ -@AssociationOverride(name = "id.channel", joinColumns = @JoinColumn(name = "chan_id", nullable = false)), -@AssociationOverride(name = "id.presenter", joinColumns = @JoinColumn(name = "presenter_name", nullable = false))}) -public class TvMagazin { - @EmbeddedId - public TvMagazinPk id; - @Temporal(TemporalType.TIME) - Date time; -} +//$Id$ +package org.hibernate.test.annotations.cid; +import java.util.Date; +import javax.persistence.AssociationOverride; +import javax.persistence.AssociationOverrides; +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; +import javax.persistence.JoinColumn; +import 
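MinMax and Tv above rely on Bean Validation constraints (@Min, @Max, @Size, @Future, @NotNull, @Valid), which Hibernate applies automatically on insert/update when a validation provider is present. As a rough, standalone illustration of the constraint semantics only (hypothetical class, not from this patch):

    import java.util.Set;
    import javax.validation.ConstraintViolation;
    import javax.validation.Validation;
    import javax.validation.Validator;
    import javax.validation.constraints.Max;
    import javax.validation.constraints.Min;

    public class MinMaxCheck {
        // Mirrors the MinMax mapping above: the value must fall within [2, 10].
        @Min(2)
        @Max(10)
        private final Integer value;

        public MinMaxCheck(Integer value) {
            this.value = value;
        }

        public static void main(String[] args) {
            Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
            Set<ConstraintViolation<MinMaxCheck>> violations =
                    validator.validate( new MinMaxCheck( 11 ) );
            // Prints 1: the value 11 violates the @Max(10) bound.
            System.out.println( violations.size() );
        }
    }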
javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.Column; + +/** + * @author Emmanuel Bernard + */ +@Entity +@AssociationOverrides({ +@AssociationOverride(name = "id.channel", joinColumns = @JoinColumn(name = "chan_id", nullable = false)), +@AssociationOverride(name = "id.presenter", joinColumns = @JoinColumn(name = "presenter_name", nullable = false))}) +public class TvMagazin { + @EmbeddedId + public TvMagazinPk id; + @Temporal(TemporalType.TIME) + @Column(name="`time`") + Date time; +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgram.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgram.java index 7a614cb020..5e18117023 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgram.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgram.java @@ -1,36 +1,37 @@ -//$Id$ -package org.hibernate.test.annotations.cid; -import java.util.Date; -import javax.persistence.AssociationOverride; -import javax.persistence.AssociationOverrides; -import javax.persistence.Column; -import javax.persistence.EmbeddedId; -import javax.persistence.Entity; -import javax.persistence.JoinColumn; -import javax.persistence.PrimaryKeyJoinColumn; -import javax.persistence.SecondaryTable; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; - -/** - * @author Chandra Patni - */ -@Entity -@SecondaryTable( name = "TV_PROGRAM_EXT", pkJoinColumns = { -@PrimaryKeyJoinColumn( name = "CHANNEL_ID" ), -@PrimaryKeyJoinColumn( name = "PRESENTER_NAME" ) - } ) -@AssociationOverrides({ -@AssociationOverride(name = "id.channel", joinColumns = @JoinColumn(name = "chan_id", nullable = false)), -@AssociationOverride(name = "id.presenter", joinColumns = @JoinColumn(name = "presenter_name", nullable = false))}) -public class TvProgram { - @EmbeddedId - public TvMagazinPk id; - - @Temporal( TemporalType.TIME ) - Date time; - - @Column( name = "TXT", table = "TV_PROGRAM_EXT" ) - public String text; - +//$Id$ +package org.hibernate.test.annotations.cid; +import java.util.Date; +import javax.persistence.AssociationOverride; +import javax.persistence.AssociationOverrides; +import javax.persistence.Column; +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; +import javax.persistence.JoinColumn; +import javax.persistence.PrimaryKeyJoinColumn; +import javax.persistence.SecondaryTable; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +/** + * @author Chandra Patni + */ +@Entity +@SecondaryTable( name = "TV_PROGRAM_EXT", pkJoinColumns = { +@PrimaryKeyJoinColumn( name = "CHANNEL_ID" ), +@PrimaryKeyJoinColumn( name = "PRESENTER_NAME" ) + } ) +@AssociationOverrides({ +@AssociationOverride(name = "id.channel", joinColumns = @JoinColumn(name = "chan_id", nullable = false)), +@AssociationOverride(name = "id.presenter", joinColumns = @JoinColumn(name = "presenter_name", nullable = false))}) +public class TvProgram { + @EmbeddedId + public TvMagazinPk id; + + @Temporal( TemporalType.TIME ) + @Column(name="`time`") + Date time; + + @Column( name = "TXT", table = "TV_PROGRAM_EXT" ) + public String text; + } \ No newline at end of file diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgramIdClass.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgramIdClass.java index a2200b0493..431881d5b1 100644 --- 
a/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgramIdClass.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/cid/TvProgramIdClass.java @@ -1,36 +1,37 @@ -//$Id$ -package org.hibernate.test.annotations.cid; -import java.util.Date; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.IdClass; -import javax.persistence.JoinColumn; -import javax.persistence.PrimaryKeyJoinColumn; -import javax.persistence.SecondaryTable; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; - -@Entity -@SecondaryTable( name = "TV_PROGRAM_IDCLASS", pkJoinColumns = - { - @PrimaryKeyJoinColumn( name = "CHANNEL_ID" ), - @PrimaryKeyJoinColumn( name = "PRESENTER_NAME" ) - } ) -@IdClass( TvMagazinPk.class ) -public class TvProgramIdClass { - @Id - @JoinColumn(nullable=false) - public Channel channel; - @Id - @JoinColumn(nullable=false) - public Presenter presenter; - - @Temporal( TemporalType.TIME ) - Date time; - - @Column( name = "TXT", table = "TV_PROGRAM_IDCLASS" ) - public String text; -} - - +//$Id$ +package org.hibernate.test.annotations.cid; +import java.util.Date; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.IdClass; +import javax.persistence.JoinColumn; +import javax.persistence.PrimaryKeyJoinColumn; +import javax.persistence.SecondaryTable; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +@Entity +@SecondaryTable( name = "TV_PROGRAM_IDCLASS", pkJoinColumns = + { + @PrimaryKeyJoinColumn( name = "CHANNEL_ID" ), + @PrimaryKeyJoinColumn( name = "PRESENTER_NAME" ) + } ) +@IdClass( TvMagazinPk.class ) +public class TvProgramIdClass { + @Id + @JoinColumn(nullable=false) + public Channel channel; + @Id + @JoinColumn(nullable=false) + public Presenter presenter; + + @Temporal( TemporalType.TIME ) + @Column(name="`time`") + Date time; + + @Column( name = "TXT", table = "TV_PROGRAM_IDCLASS" ) + public String text; +} + + diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/EntityWithAnElementCollection.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/EntityWithAnElementCollection.java new file mode 100644 index 0000000000..1c5609cd3c --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/EntityWithAnElementCollection.java @@ -0,0 +1,68 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.collectionelement; + +import java.util.HashSet; +import java.util.Set; + +import javax.persistence.ElementCollection; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; +import javax.persistence.Table; + +/** + * @author Steve Ebersole + * @author Brett Meyer + */ +@Entity +// HHH-7732 -- "EntityWithAnElementCollection" is too long for Oracle. +@Table( name = "EWAEC" ) +public class EntityWithAnElementCollection { + private Long id; + private Set someStrings = new HashSet(); + + @Id + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + @ElementCollection + // HHH-7732 -- "EntityWithAnElementCollection_someStrings" is too long for Oracle. + @JoinTable( + name = "SomeStrings", + joinColumns = @JoinColumn( name = "EWAEC_ID") ) + public Set getSomeStrings() { + return someStrings; + } + + public void setSomeStrings(Set someStrings) { + this.someStrings = someStrings; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/QueryTest.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/QueryTest.java new file mode 100644 index 0000000000..c56fbcaf63 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/QueryTest.java @@ -0,0 +1,50 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.collectionelement; + +import org.hibernate.Session; + +import org.junit.Test; + +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; + +/** + * @author Steve Ebersole + */ +public class QueryTest extends BaseCoreFunctionalTestCase { + @Override + protected Class[] getAnnotatedClasses() { + return new Class[] { EntityWithAnElementCollection.class }; + } + + @Test + @TestForIssue( jiraKey = "HHH-5209" ) + public void testMemberOfSyntax() { + // performs syntax checking of the MEMBER OF predicate against a basic collection + Session s = openSession(); + s.createQuery( "from EntityWithAnElementCollection e where 'abc' member of e.someStrings" ).list(); + s.close(); + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Location.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Location.java index 2ea6030fee..7125491a42 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Location.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Location.java @@ -1,72 +1,74 @@ -/* - * Hibernate, Relational Persistence for Idiomatic Java - * - * Copyright (c) 2011, Red Hat Inc. or third-party contributors as - * indicated by the @author tags or express copyright attribution - * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Inc. - * - * This copyrighted material is made available to anyone wishing to use, modify, - * copy, or redistribute it subject to the terms and conditions of the GNU - * Lesser General Public License, as published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License - * for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this distribution; if not, write to: - * Free Software Foundation, Inc. - * 51 Franklin Street, Fifth Floor - * Boston, MA 02110-1301 USA - */ -package org.hibernate.test.annotations.collectionelement.embeddables.withcustomenumdef; - -import javax.persistence.Embeddable; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; - -/** - * @author Steve Ebersole - */ -@Embeddable -public class Location { - public static enum Type { - POSTAL_CODE, - COMMUNE, - REGION, - PROVINCE, - COUNTY - } - - private String name; - - @Enumerated(EnumType.STRING) -// @Column(columnDefinition = "VARCHAR(32)") - private Type type; - - public Location() { - } - - public Location(String name, Type type) { - this.name = name; - this.type = type; - } - - public Type getType() { - return type; - } - - public void setType(Type type) { - this.type = type; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } -} +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2011, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. 
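The new QueryTest only checks that the MEMBER OF predicate parses and executes against a basic @ElementCollection, and the entity/table names in EntityWithAnElementCollection are deliberately short (EWAEC, SomeStrings) because of Oracle's 30-character identifier limit noted in the HHH-7732 comments. A hedged restatement of the query pattern being exercised, assuming an open Session and the mapping added above:

    import java.util.List;

    import org.hibernate.Session;

    public class MemberOfExample {
        // Returns every EntityWithAnElementCollection whose someStrings element
        // collection contains the literal 'abc'; MEMBER OF is rendered as a
        // subquery against the collection table (SomeStrings).
        @SuppressWarnings("unchecked")
        public static List<EntityWithAnElementCollection> containingAbc(Session session) {
            return session.createQuery(
                    "from EntityWithAnElementCollection e where 'abc' member of e.someStrings" )
                    .list();
        }
    }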
All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.collectionelement.embeddables.withcustomenumdef; + +import javax.persistence.Embeddable; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.Column; + +/** + * @author Steve Ebersole + */ +@Embeddable +public class Location { + public static enum Type { + POSTAL_CODE, + COMMUNE, + REGION, + PROVINCE, + COUNTY + } + + private String name; + + @Enumerated(EnumType.STRING) +// @Column(columnDefinition = "VARCHAR(32)") + @Column(name = "`type`") + private Type type; + + public Location() { + } + + public Location(String name, Type type) { + this.name = name; + this.type = type; + } + + public Type getType() { + return type; + } + + public void setType(Type type) { + this.type = type; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Query.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Query.java index 9dd70b6b08..07f16ed706 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Query.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/collectionelement/embeddables/withcustomenumdef/Query.java @@ -31,6 +31,7 @@ import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.Id; +import javax.persistence.Table; import org.hibernate.annotations.GenericGenerator; @@ -38,6 +39,7 @@ import org.hibernate.annotations.GenericGenerator; * @author Steve Ebersole */ @Entity +@Table(name="`Query`") public class Query { @Id @GeneratedValue( generator = "increment" ) diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/derivedidentities/e1/b/specjmapid/lazy/order_orm.xml b/hibernate-core/src/test/java/org/hibernate/test/annotations/derivedidentities/e1/b/specjmapid/lazy/order_orm.xml index 259e3cba10..4342e6d3c1 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/derivedidentities/e1/b/specjmapid/lazy/order_orm.xml +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/derivedidentities/e1/b/specjmapid/lazy/order_orm.xml @@ -22,7 +22,7 @@ - + diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/embeddables/Investment.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/embeddables/Investment.java index db2f598c23..b5b7cccf06 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/embeddables/Investment.java 
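Here the quoting moves from columns to a whole table: the Query entity would otherwise map to a table literally named Query, which collides with a keyword on some targeted databases, so @Table(name = "`Query`") asks the dialect to quote it. The same pattern for any reserved table name, shown with a hypothetical entity:

    import javax.persistence.Entity;
    import javax.persistence.GeneratedValue;
    import javax.persistence.Id;
    import javax.persistence.Table;

    @Entity
    // Without the backticks the schema export would emit an unquoted reserved
    // word as the table name, which several dialects reject.
    @Table(name = "`Order`")
    public class PurchaseOrder {
        @Id
        @GeneratedValue
        private Long id;
    }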
+++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/embeddables/Investment.java @@ -1,63 +1,64 @@ -/* - * Hibernate, Relational Persistence for Idiomatic Java - * - * Copyright (c) 2012, Red Hat Inc. or third-party contributors as - * indicated by the @author tags or express copyright attribution - * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Inc. - * - * This copyrighted material is made available to anyone wishing to use, modify, - * copy, or redistribute it subject to the terms and conditions of the GNU - * Lesser General Public License, as published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License - * for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this distribution; if not, write to: - * Free Software Foundation, Inc. - * 51 Franklin Street, Fifth Floor - * Boston, MA 02110-1301 USA - */ -package org.hibernate.test.annotations.embeddables; - -import javax.persistence.Column; -import javax.persistence.Embeddable; - -/** - * @author Chris Pheby - */ -@Embeddable -public class Investment { - - private DollarValue amount; - private String description; - private MyDate date; - - public DollarValue getAmount() { - return amount; - } - - public void setAmount(DollarValue amount) { - this.amount = amount; - } - - @Column(length = 500) - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public MyDate getDate() { - return date; - } - - public void setDate(MyDate date) { - this.date = date; - } -} +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.embeddables; + +import javax.persistence.Column; +import javax.persistence.Embeddable; + +/** + * @author Chris Pheby + */ +@Embeddable +public class Investment { + + private DollarValue amount; + private String description; + @Column(name = "`date`") + private MyDate date; + + public DollarValue getAmount() { + return amount; + } + + public void setAmount(DollarValue amount) { + this.amount = amount; + } + + @Column(length = 500) + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public MyDate getDate() { + return date; + } + + public void setDate(MyDate date) { + this.date = date; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/embedded/CorpType.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/embedded/CorpType.java index 47d0833d42..a42ab32ab7 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/embedded/CorpType.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/embedded/CorpType.java @@ -1,32 +1,34 @@ -//$Id$ -package org.hibernate.test.annotations.embedded; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.Id; - -/** - * @author Emmanuel Bernard - */ -@Entity -public class CorpType { - private Integer id; - private String type; - - @Id - @GeneratedValue - public Integer getId() { - return id; - } - - public void setId(Integer id) { - this.id = id; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } -} +//$Id$ +package org.hibernate.test.annotations.embedded; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.Column; + +/** + * @author Emmanuel Bernard + */ +@Entity +public class CorpType { + private Integer id; + @Column(name = "`type`") + private String type; + + @Id + @GeneratedValue + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/FirstLetterType.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/FirstLetterType.java index f09c609779..d8beae0b30 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/FirstLetterType.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/FirstLetterType.java @@ -36,7 +36,10 @@ public class FirstLetterType extends org.hibernate.type.EnumType { } else { String enumString = ( (Enum) value ).name(); - st.setObject( index, enumString.charAt( 0 ), sqlTypes()[0] ); + // Using setString here, rather than setObject. A few JDBC drivers + // (Oracle, DB2, and SQLServer) were having trouble converting + // the char to VARCHAR. 
+ st.setString( index, enumString.substring( 0, 1 ) ); } } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/LastNumberType.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/LastNumberType.java index 4930ae5fd9..790d3dccf5 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/LastNumberType.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/LastNumberType.java @@ -37,7 +37,10 @@ public class LastNumberType extends org.hibernate.type.EnumType { else { String enumString = ( (Enum) value ).name(); - st.setObject( index, enumString.charAt( enumString.length() - 1 ), sqlTypes()[0] ); + // Using setString here, rather than setObject. A few JDBC drivers + // (Oracle, DB2, and SQLServer) were having trouble converting + // the char to VARCHAR. + st.setString( index, enumString.substring( enumString.length() - 1 ) ); } } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/MapKeyEnumeratedTest.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/MapKeyEnumeratedTest.java new file mode 100644 index 0000000000..a7dff77569 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/MapKeyEnumeratedTest.java @@ -0,0 +1,63 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
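FirstLetterType and LastNumberType are custom subclasses of org.hibernate.type.EnumType that persist only a single character of the enum constant's name, and the change swaps setObject for setString because, per the new comments, the Oracle, DB2 and SQL Server drivers struggled to convert a char parameter to VARCHAR. A stripped-down sketch of just the write-side binding (helper name and null handling are illustrative, not from the patch):

    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.sql.Types;

    public final class EnumFirstLetterBinder {
        private EnumFirstLetterBinder() {
        }

        // Binds the first letter of the enum name as a plain String. setString keeps
        // the parameter a VARCHAR, whereas setObject with a Character could be sent
        // as a type some drivers refuse to convert.
        public static void bindFirstLetter(PreparedStatement st, int index, Enum<?> value)
                throws SQLException {
            if ( value == null ) {
                st.setNull( index, Types.VARCHAR );
            }
            else {
                st.setString( index, value.name().substring( 0, 1 ) );
            }
        }
    }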
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.enumerated.mapkey; + +import org.hibernate.Session; + +import org.junit.Test; + +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; + +/** + * @author Steve Ebersole + */ +public class MapKeyEnumeratedTest extends BaseCoreFunctionalTestCase { + @Override + protected Class[] getAnnotatedClasses() { + return new Class[] { User.class, SocialNetworkProfile.class }; + } + + @Test + public void testMapKeyEnumerated() { + Session s = openSession(); + s.beginTransaction(); + User user = new User(SocialNetwork.STUB_NETWORK_NAME, "facebookId"); + s.save( user ); + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + user = (User) s.get( User.class, user.getId() ); + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + user = (User) s.get( User.class, user.getId() ); + s.delete( user ); + s.getTransaction().commit(); + s.close(); + } +} diff --git a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterStreamImpl.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/SocialNetwork.java similarity index 62% rename from hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterStreamImpl.java rename to hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/SocialNetwork.java index 13b7a786ca..efc550c87d 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterStreamImpl.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/SocialNetwork.java @@ -1,7 +1,7 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * - * Copyright (c) 2010, Red Hat Inc. or third-party contributors as + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are * distributed under license by Red Hat Inc. @@ -21,32 +21,12 @@ * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ -package org.hibernate.type.descriptor.java; - -import java.io.Reader; -import java.io.StringReader; - -import org.hibernate.type.descriptor.CharacterStream; +package org.hibernate.test.annotations.enumerated.mapkey; /** - * Implementation of {@link CharacterStream} - * + * @author Dmitry Spikhalskiy * @author Steve Ebersole */ -public class CharacterStreamImpl implements CharacterStream { - private final StringReader reader; - private final int length; - - public CharacterStreamImpl(String chars) { - reader = new StringReader( chars ); - length = chars.length(); - } - - public Reader getReader() { - return reader; - } - - public int getLength() { - return length; - } +public enum SocialNetwork { + STUB_NETWORK_NAME } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/SocialNetworkProfile.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/SocialNetworkProfile.java new file mode 100644 index 0000000000..98e4de2bc8 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/SocialNetworkProfile.java @@ -0,0 +1,68 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. 
or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.enumerated.mapkey; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.FetchType; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; +import javax.persistence.UniqueConstraint; + +/** + * @author Dmitry Spikhalskiy + * @author Steve Ebersole + */ +@Entity +@Table(name = "social_network_profile", uniqueConstraints = {@UniqueConstraint(columnNames = {"social_network", "network_id"})}) +public class SocialNetworkProfile { + @javax.persistence.Id + @javax.persistence.GeneratedValue(generator = "system-uuid") + @org.hibernate.annotations.GenericGenerator(name = "system-uuid", strategy = "uuid2") + @javax.persistence.Column(name = "id", unique = true) + private java.lang.String id; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "user_id", nullable = false) + private User user; + + @Enumerated(value = EnumType.STRING) //if change type to ordinal - test will not failure + @Column(name = "social_network", nullable = false) + private SocialNetwork socialNetworkType; + + @Column(name = "network_id", nullable = false) + private String networkId; + + protected SocialNetworkProfile() { + } + + protected SocialNetworkProfile(User user, SocialNetwork socialNetworkType, String networkId) { + this.user = user; + this.socialNetworkType = socialNetworkType; + this.networkId = networkId; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/User.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/User.java new file mode 100644 index 0000000000..a286af8c18 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/mapkey/User.java @@ -0,0 +1,68 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.enumerated.mapkey; + +import javax.persistence.CascadeType; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.FetchType; +import javax.persistence.MapKeyColumn; +import javax.persistence.MapKeyEnumerated; +import javax.persistence.OneToMany; +import java.util.EnumMap; +import java.util.Map; + +/** + * @author Dmitry Spikhalskiy + * @author Steve Ebersole + */ +@Entity +public class User { + @javax.persistence.Id + @javax.persistence.GeneratedValue(generator = "system-uuid") + @org.hibernate.annotations.GenericGenerator(name = "system-uuid", strategy = "uuid2") + @javax.persistence.Column(name = "id", unique = true) + private java.lang.String id; + + @MapKeyEnumerated( EnumType.STRING ) + @MapKeyColumn(name = "social_network") + @OneToMany(mappedBy = "user", fetch = FetchType.LAZY, cascade = CascadeType.ALL, orphanRemoval = true) + private Map socialNetworkProfiles = new EnumMap(SocialNetwork.class); + + protected User() { + } + + public User(SocialNetwork sn, String socialNetworkId) { + SocialNetworkProfile profile = new SocialNetworkProfile(this, sn, socialNetworkId); + socialNetworkProfiles.put(sn, profile); + } + + public SocialNetworkProfile getSocialNetworkProfile(SocialNetwork socialNetwork) { + return socialNetworkProfiles.get(socialNetwork); + } + + public String getId() { + return id; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/Binding.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/Binding.java new file mode 100644 index 0000000000..ba0588070b --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/Binding.java @@ -0,0 +1,33 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
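The new mapkey tests model a User whose profiles live in a Map keyed by the SocialNetwork enum; @MapKeyEnumerated(EnumType.STRING) plus @MapKeyColumn store the key as the enum name in the social_network column, which is the point of the "if change type to ordinal" remark on SocialNetworkProfile. A compact sketch of the same mapping shape with hypothetical Contact/Address classes (not part of this patch):

    import java.util.EnumMap;
    import java.util.Map;
    import javax.persistence.CascadeType;
    import javax.persistence.Column;
    import javax.persistence.Entity;
    import javax.persistence.EnumType;
    import javax.persistence.Enumerated;
    import javax.persistence.GeneratedValue;
    import javax.persistence.Id;
    import javax.persistence.ManyToOne;
    import javax.persistence.MapKeyColumn;
    import javax.persistence.MapKeyEnumerated;
    import javax.persistence.OneToMany;

    enum Channel { EMAIL, SMS }

    @Entity
    class Contact {
        @Id
        @GeneratedValue
        Long id;

        // The map key is read from the channel column of the Address table and is
        // persisted as the enum name ('EMAIL', 'SMS'); EnumType.ORDINAL would
        // store 0/1 instead.
        @MapKeyEnumerated(EnumType.STRING)
        @MapKeyColumn(name = "channel")
        @OneToMany(mappedBy = "contact", cascade = CascadeType.ALL)
        Map<Channel, Address> addresses = new EnumMap<Channel, Address>( Channel.class );
    }

    @Entity
    class Address {
        @Id
        @GeneratedValue
        Long id;

        @ManyToOne
        Contact contact;

        // Owning side of the key column, mirroring SocialNetworkProfile above.
        @Enumerated(EnumType.STRING)
        @Column(name = "channel")
        Channel channel;
    }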
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.enumerated.ormXml; + +/** + * @author Oliverio + * @author Steve Ebersole + */ +public enum Binding { + PAPERBACK, + HARDCOVER +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/BookWithOrmEnum.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/BookWithOrmEnum.java new file mode 100644 index 0000000000..2572862310 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/BookWithOrmEnum.java @@ -0,0 +1,54 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.enumerated.ormXml; + +/** + * @author Oliverio + * @author Steve Ebersole + */ +public class BookWithOrmEnum { + private Long id; + private Binding bindingOrdinalEnum; + private Binding bindingStringEnum; + + public Long getId() { + return id; + } + + public Binding getBindingOrdinalEnum() { + return bindingOrdinalEnum; + } + + public void setBindingOrdinalEnum(Binding bindingOrdinalEnum) { + this.bindingOrdinalEnum = bindingOrdinalEnum; + } + + public Binding getBindingStringEnum() { + return bindingStringEnum; + } + + public void setBindingStringEnum(Binding bindingStringEnum) { + this.bindingStringEnum = bindingStringEnum; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/OrmXmlEnumTypeTest.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/OrmXmlEnumTypeTest.java new file mode 100644 index 0000000000..0fee63658e --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/enumerated/ormXml/OrmXmlEnumTypeTest.java @@ -0,0 +1,57 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.annotations.enumerated.ormXml; + +import org.hibernate.type.CustomType; +import org.hibernate.type.EnumType; +import org.hibernate.type.Type; + +import org.junit.Test; + +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.hibernate.testing.junit4.ExtraAssertions; + +import static org.junit.Assert.assertFalse; + +/** + * @author Steve Ebersole + */ +@TestForIssue( jiraKey = "HHH-7645" ) +public class OrmXmlEnumTypeTest extends BaseCoreFunctionalTestCase { + @Override + protected String[] getXmlFiles() { + return new String[] { "org/hibernate/test/annotations/enumerated/ormXml/orm.xml" }; + } + + @Test + public void testOrmXmlDefinedEnumType() { + Type bindingPropertyType = configuration().getClassMapping( BookWithOrmEnum.class.getName() ) + .getProperty( "bindingStringEnum" ) + .getType(); + CustomType customType = ExtraAssertions.assertTyping( CustomType.class, bindingPropertyType ); + EnumType enumType = ExtraAssertions.assertTyping( EnumType.class, customType.getUserType() ); + assertFalse( enumType.isOrdinal() ); + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/filter/subclass/joined/JoinedSubClassTest.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/filter/subclass/joined/JoinedSubClassTest.java index 1613dbc703..0f9cc763b6 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/filter/subclass/joined/JoinedSubClassTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/filter/subclass/joined/JoinedSubClassTest.java @@ -1,12 +1,19 @@ package org.hibernate.test.annotations.filter.subclass.joined; import junit.framework.Assert; -import org.junit.Test; +import org.hibernate.dialect.CUBRIDDialect; import org.hibernate.test.annotations.filter.subclass.SubClassTest; import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.SkipForDialect; +import org.junit.Test; @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of verion 8.4.1 CUBRID doesn't support temporary tables. 
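OrmXmlEnumTypeTest verifies that an enum mapped purely through orm.xml (BookWithOrmEnum carries no annotations) still resolves to Hibernate's EnumType, and that the string-mapped property is not ordinal. Presumably the referenced orm.xml declares that property with a STRING enumerated mapping; under that assumption, a small usage sketch of the two properties:

    import org.hibernate.Session;

    public class OrmXmlEnumUsage {
        // bindingStringEnum is expected to be written as the literal 'HARDCOVER',
        // while the ordinal-mapped property would be stored as a number.
        public static void save(Session session) {
            BookWithOrmEnum book = new BookWithOrmEnum();
            book.setBindingOrdinalEnum( Binding.HARDCOVER );
            book.setBindingStringEnum( Binding.HARDCOVER );
            session.save( book );
        }
    }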
This test fails with" + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class JoinedSubClassTest extends SubClassTest{ @Override diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/Course.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/Course.java new file mode 100644 index 0000000000..04b521f0db --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/Course.java @@ -0,0 +1,53 @@ +package org.hibernate.test.annotations.idmanytoone; + +import java.io.Serializable; +import java.util.Set; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.Table; + +/** + * @author Alex Kalashnikov + */ +@Entity +@Table(name = "idmanytoone_course") +public class Course implements Serializable { + + @Id + @GeneratedValue + private int id; + + private String name; + + @OneToMany(mappedBy = "course") + private Set students; + + public Course() { + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Set getStudents() { + return students; + } + + public void setStudents(Set students) { + this.students = students; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/CourseStudent.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/CourseStudent.java new file mode 100644 index 0000000000..4d34f8d9df --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/CourseStudent.java @@ -0,0 +1,57 @@ +package org.hibernate.test.annotations.idmanytoone; + +import java.io.Serializable; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.IdClass; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; + +/** + * @author Alex Kalashnikov + */ +@Entity +@Table(name = "idmanytoone_course_student") +public class CourseStudent implements Serializable { + + @Id + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "course_id") + private Course course; + + @Id + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "student_id") + private Student student; + + private String value; + + public CourseStudent() { + } + + public Course getCourse() { + return course; + } + + public void setCourse(Course course) { + this.course = course; + } + + public Student getStudent() { + return student; + } + + public void setStudent(Student student) { + this.student = student; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/IdManyToOneTest.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/IdManyToOneTest.java index 1318e8bfe5..0fec8399b6 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/IdManyToOneTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/IdManyToOneTest.java @@ -23,14 +23,16 @@ */ package org.hibernate.test.annotations.idmanytoone; -import org.junit.Test; +import static 
org.junit.Assert.assertEquals; +import org.hibernate.Criteria; import org.hibernate.Session; import org.hibernate.Transaction; +import org.hibernate.criterion.Restrictions; import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.TestForIssue; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; - -import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * @author Emmanuel Bernard @@ -66,6 +68,29 @@ public class IdManyToOneTest extends BaseCoreFunctionalTestCase { s.close(); } + @Test + @TestForIssue( jiraKey = "HHH-7767" ) + public void testCriteriaRestrictionOnIdManyToOne() { + Session s = openSession(); + s.beginTransaction(); + + s.createQuery( "from Course c join c.students cs join cs.student s where s.name = 'Foo'" ).list(); + + Criteria criteria = s.createCriteria( Course.class ); + criteria.createCriteria( "students" ).createCriteria( "student" ).add( Restrictions.eq( "name", "Foo" ) ); + criteria.list(); + + Criteria criteria2 = s.createCriteria( Course.class ); + criteria2.createAlias( "students", "cs" ); + criteria2.add( Restrictions.eq( "cs.value", "Bar" ) ); + criteria2.createAlias( "cs.student", "s" ); + criteria2.add( Restrictions.eq( "s.name", "Foo" ) ); + criteria2.list(); + + s.getTransaction().commit(); + s.close(); + } + @Override protected Class[] getAnnotatedClasses() { return new Class[] { @@ -76,6 +101,9 @@ public class IdManyToOneTest extends BaseCoreFunctionalTestCase { CardField.class, Card.class, Project.class, + Course.class, + Student.class, + CourseStudent.class, //tested only through deployment //ANN-590 testIdClassManyToOneWithReferenceColumn diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/Student.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/Student.java new file mode 100644 index 0000000000..921d437fcb --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/idmanytoone/Student.java @@ -0,0 +1,53 @@ +package org.hibernate.test.annotations.idmanytoone; + +import java.io.Serializable; +import java.util.Set; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.Table; + +/** + * @author Alex Kalashnikov + */ +@Entity +@Table(name = "idmanytoone_student") +public class Student implements Serializable { + + @Id + @GeneratedValue + private int id; + + private String name; + + @OneToMany(mappedBy = "student") + private Set courses; + + public Student() { + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Set getCourses() { + return courses; + } + + public void setCourses(Set courses) { + this.courses = courses; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/join/SysUserOrm.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/join/SysUserOrm.java index 0b4c94306a..7892c34d62 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/join/SysUserOrm.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/join/SysUserOrm.java @@ -1,43 +1,43 @@ -package org.hibernate.test.annotations.join; -import java.util.Collection; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import 
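The HHH-7767 test drives both HQL and the Criteria API across CourseStudent's composite identifier, which is made of two @Id @ManyToOne associations. The Criteria navigation it validates, restated as a standalone helper that assumes the Course/CourseStudent/Student mappings added above:

    import java.util.List;

    import org.hibernate.Criteria;
    import org.hibernate.Session;
    import org.hibernate.criterion.Restrictions;

    public class IdManyToOneCriteriaExample {
        // Navigates Course -> students (CourseStudent) -> student and restricts on
        // the student's name, the association path covered by HHH-7767.
        @SuppressWarnings("unchecked")
        public static List<Course> coursesTakenBy(Session session, String studentName) {
            Criteria criteria = session.createCriteria( Course.class );
            criteria.createCriteria( "students" )
                    .createCriteria( "student" )
                    .add( Restrictions.eq( "name", studentName ) );
            return criteria.list();
        }
    }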
javax.persistence.GeneratedValue; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import javax.persistence.ManyToMany; -import javax.persistence.Table; - -@Entity( name = "sys_user" ) -@Table( name = "SYS_USER" ) -public class SysUserOrm { - - private long userid; - - private Collection groups; - - @Id - @GeneratedValue - @Column( name = "`auid`" ) - public long getUserid() { - return userid; - } - - public void setUserid( long userid ) { - this.userid = userid; - } - - @ManyToMany( fetch = FetchType.LAZY ) - @JoinTable( name = "SYS_GROUPS_USERS", - joinColumns = @JoinColumn( name = "USERID", referencedColumnName = "`auid`" ), - inverseJoinColumns = @JoinColumn( name = "GROUPID", referencedColumnName = "GROUPID" ) ) - public Collection getGroups() { - return groups; - } - - public void setGroups( Collection groups ) { - this.groups = groups; - } -} +package org.hibernate.test.annotations.join; +import java.util.Collection; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; +import javax.persistence.ManyToMany; +import javax.persistence.Table; + +@Entity( name = "sys_user" ) +@Table( name = "`SYS_USER`" ) +public class SysUserOrm { + + private long userid; + + private Collection groups; + + @Id + @GeneratedValue + @Column( name = "`auid`" ) + public long getUserid() { + return userid; + } + + public void setUserid( long userid ) { + this.userid = userid; + } + + @ManyToMany( fetch = FetchType.LAZY ) + @JoinTable( name = "SYS_GROUPS_USERS", + joinColumns = @JoinColumn( name = "USERID", referencedColumnName = "`auid`" ), + inverseJoinColumns = @JoinColumn( name = "GROUPID", referencedColumnName = "GROUPID" ) ) + public Collection getGroups() { + return groups; + } + + public void setGroups( Collection groups ) { + this.groups = groups; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ExplicitSerializableType.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ExplicitSerializableType.java index 13effc1804..75db1eae38 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ExplicitSerializableType.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ExplicitSerializableType.java @@ -1,30 +1,31 @@ package org.hibernate.test.annotations.lob; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; +import java.io.Serializable; -import org.hibernate.engine.spi.SessionImplementor; -import org.hibernate.test.annotations.lob.EntitySerialize.CommonSerializable; import org.hibernate.type.SerializableToBlobType; /** * @author Janario Oliveira */ -public class ExplicitSerializableType extends SerializableToBlobType { - @Override - public Object get(ResultSet rs, String name) throws SQLException { - CommonSerializable deserialize = (CommonSerializable) super.get( rs, name ); - deserialize.setDefaultValue( "EXPLICIT" ); - return deserialize; - } - - @Override - public void set(PreparedStatement st, Object value, int index, SessionImplementor session) throws SQLException { - if ( value != null ) { - ( (CommonSerializable) value ).setDefaultValue( null ); - } - super.set( st, value, index, session ); - } +public class ExplicitSerializableType extends SerializableToBlobType { + + // TODO: Find another way to test that 
this type is being used by + // SerializableToBlobTypeTest#testPersist. Most AbstractStandardBasicType + // methods are final. + +// @Override +// public Object get(ResultSet rs, String name) throws SQLException { +// CommonSerializable deserialize = (CommonSerializable) super.get( rs, name ); +// deserialize.setDefaultValue( "EXPLICIT" ); +// return deserialize; +// } +// +// @Override +// public void set(PreparedStatement st, Object value, int index, SessionImplementor session) throws SQLException { +// if ( value != null ) { +// ( (CommonSerializable) value ).setDefaultValue( null ); +// } +// super.set( st, value, index, session ); +// } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ImplicitSerializableType.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ImplicitSerializableType.java index c88fb14dea..61de2ffdb3 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ImplicitSerializableType.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/ImplicitSerializableType.java @@ -1,31 +1,31 @@ package org.hibernate.test.annotations.lob; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; +import java.io.Serializable; -import org.hibernate.engine.spi.SessionImplementor; -import org.hibernate.test.annotations.lob.EntitySerialize.CommonSerializable; import org.hibernate.type.SerializableToBlobType; /** * @author Janario Oliveira */ -public class ImplicitSerializableType extends SerializableToBlobType { +public class ImplicitSerializableType extends SerializableToBlobType { - @Override - public Object get(ResultSet rs, String name) throws SQLException { - CommonSerializable deserialize = (CommonSerializable) super.get( rs, name ); - deserialize.setDefaultValue( "IMPLICIT" ); - return deserialize; - } - - @Override - public void set(PreparedStatement st, Object value, int index, SessionImplementor session) throws SQLException { - if ( value != null ) { - ( (CommonSerializable) value ).setDefaultValue( null ); - } - super.set( st, value, index, session ); - } + // TODO: Find another way to test that this type is being used by + // SerializableToBlobTypeTest#testPersist. Most AbstractStandardBasicType + // methods are final. 
+ +// @Override +// public Object get(ResultSet rs, String name) throws SQLException { +// CommonSerializable deserialize = (CommonSerializable) super.get( rs, name ); +// deserialize.setDefaultValue( "IMPLICIT" ); +// return deserialize; +// } +// +// @Override +// public void set(PreparedStatement st, Object value, int index, SessionImplementor session) throws SQLException { +// if ( value != null ) { +// ( (CommonSerializable) value ).setDefaultValue( null ); +// } +// super.set( st, value, index, session ); +// } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/SerializableToBlobTypeTest.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/SerializableToBlobTypeTest.java index 57e16adf46..359df27fc7 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/SerializableToBlobTypeTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/SerializableToBlobTypeTest.java @@ -80,9 +80,6 @@ public class SerializableToBlobTypeTest extends BaseCoreFunctionalTestCase { assertEquals( "explicitOverridingImplicit", persistedSerialize.explicitOverridingImplicit.value ); assertEquals( "defaultExplicitLob", persistedSerialize.explicitLob.defaultValue ); - assertEquals( "EXPLICIT", persistedSerialize.explicit.defaultValue ); - assertEquals( "IMPLICIT", persistedSerialize.implicit.defaultValue ); - assertEquals( "EXPLICIT", persistedSerialize.explicitOverridingImplicit.defaultValue ); session.close(); } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/hhh4635/LobTest.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/hhh4635/LobTest.java new file mode 100644 index 0000000000..d34a8cb05b --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/hhh4635/LobTest.java @@ -0,0 +1,60 @@ +package org.hibernate.test.annotations.lob.hhh4635; + +import org.hibernate.Query; +import org.hibernate.Session; +import org.hibernate.dialect.Oracle8iDialect; +import org.hibernate.testing.RequiresDialect; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.jboss.logging.Logger; +import org.junit.Test; + +/** + * To reproduce this issue, Oracle MUST use a multi-byte character set (UTF-8)! 
+ * + * @author Brett Meyer + */ +@RequiresDialect( Oracle8iDialect.class ) +@TestForIssue( jiraKey = "HHH-4635" ) +public class LobTest extends BaseCoreFunctionalTestCase { + + private static final Logger LOG = Logger.getLogger( LobTest.class ); + + @Test + public void hibernateTest() { + printConfig(); + + Session session = openSession(); + session.beginTransaction(); + LobTestEntity entity = new LobTestEntity(); + entity.setId(1L); + entity.setLobValue(session.getLobHelper().createBlob(new byte[9999])); + entity.setQwerty(randomString(4000)); + session.save(entity); + session.getTransaction().commit(); + } + + @Override + protected Class[] getAnnotatedClasses() { + return new Class[] { LobTestEntity.class }; + } + + private String randomString( int count ) { + StringBuilder buffer = new StringBuilder(count); + for( int i = 0; i < count; i++ ) { + buffer.append( 'a' ); + } + return buffer.toString(); + } + + private void printConfig() { + String sql = "select value from V$NLS_PARAMETERS where parameter = 'NLS_CHARACTERSET'"; + + Session session = openSession(); + session.beginTransaction(); + Query query = session.createSQLQuery( sql ); + + String s = (String) query.uniqueResult(); + LOG.debug( "Using Oracle charset " + s ); + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/hhh4635/LobTestEntity.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/hhh4635/LobTestEntity.java new file mode 100644 index 0000000000..9938d652a2 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/lob/hhh4635/LobTestEntity.java @@ -0,0 +1,48 @@ +package org.hibernate.test.annotations.lob.hhh4635; + +import java.sql.Blob; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Lob; +import javax.persistence.Table; + +@Entity +@Table( name = "lob_test" ) +public class LobTestEntity { + + @Id + private Long id; + + @Lob + private Blob lobValue; + + @Column( name = "qwerty", length = 4000 ) + private String qwerty; + + public void setId(Long id) { + this.id = id; + } + + public Long getId() { + return id; + } + + public void setLobValue(Blob lobValue) { + this.lobValue = lobValue; + } + + public Blob getLobValue() { + return lobValue; + } + + public void setQwerty(String qwerty) { + this.qwerty = qwerty; + } + + public String getQwerty() { + return qwerty; + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/manytomany/Zone.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/manytomany/Zone.java index c3bc5578ff..931f1d881f 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/manytomany/Zone.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/manytomany/Zone.java @@ -4,11 +4,13 @@ import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; +import javax.persistence.Table; /** * @author Emmanuel Bernard */ @Entity +@Table(name="`Zone`") public class Zone { private Integer id; diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/manytoonewithformula/Language.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/manytoonewithformula/Language.java index fd061e0f4f..c689be962b 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/manytoonewithformula/Language.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/manytoonewithformula/Language.java @@ 
-30,11 +30,13 @@ import java.io.Serializable; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Id; +import javax.persistence.Table; /** * @author Sharath Reddy */ @Entity +@Table(name="`Language`") public class Language implements Serializable { private static final long serialVersionUID = 1L; diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/onetoone/SerialNumber.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/onetoone/SerialNumber.java index 3a4edd026c..6507eca423 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/onetoone/SerialNumber.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/onetoone/SerialNumber.java @@ -2,6 +2,7 @@ package org.hibernate.test.annotations.onetoone; import javax.persistence.Entity; import javax.persistence.Id; +import javax.persistence.Column; /** * @author Emmanuel Bernard @@ -35,6 +36,7 @@ public class SerialNumber { this.id = id; } + @Column(name="`value`") public String getValue() { return value; } diff --git a/hibernate-core/src/test/java/org/hibernate/test/annotations/query/Dictionary.java b/hibernate-core/src/test/java/org/hibernate/test/annotations/query/Dictionary.java index cc8ee5d9a3..fe8a596e59 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/annotations/query/Dictionary.java +++ b/hibernate-core/src/test/java/org/hibernate/test/annotations/query/Dictionary.java @@ -25,7 +25,7 @@ import javax.persistence.SqlResultSetMapping; @FieldResult(name = "name", column = "name"), @FieldResult(name = "editor", column = "editor") }, - discriminatorColumn = "type" + discriminatorColumn = "`type`" ) } ) diff --git a/hibernate-core/src/test/java/org/hibernate/test/batchfetch/BatchFetchTest.java b/hibernate-core/src/test/java/org/hibernate/test/batchfetch/BatchFetchTest.java index 806d9951a4..4572399cfa 100755 --- a/hibernate-core/src/test/java/org/hibernate/test/batchfetch/BatchFetchTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/batchfetch/BatchFetchTest.java @@ -22,21 +22,23 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.test.batchfetch; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.junit.Test; - import org.hibernate.Hibernate; import org.hibernate.Session; import org.hibernate.Transaction; +import org.hibernate.cfg.AvailableSettings; +import org.hibernate.cfg.Configuration; +import org.hibernate.loader.BatchFetchStyle; import org.hibernate.testing.FailureExpectedWithNewMetamodel; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * @author Gavin King @@ -47,6 +49,18 @@ public class BatchFetchTest extends BaseCoreFunctionalTestCase { return new String[] { "batchfetch/ProductLine.hbm.xml" }; } + @Override + protected Class[] getAnnotatedClasses() { + return new Class[] { BatchLoadableEntity.class }; + } + + @Override + protected void configure(Configuration configuration) { + super.configure( configuration ); + configuration.setProperty( AvailableSettings.GENERATE_STATISTICS, "true" ); + configuration.setProperty( AvailableSettings.USE_SECOND_LEVEL_CACHE, "false" ); + } + @SuppressWarnings( {"unchecked"}) @Test @FailureExpectedWithNewMetamodel @@ 
-138,5 +152,53 @@ public class BatchFetchTest extends BaseCoreFunctionalTestCase { s.close(); } + @Test + @SuppressWarnings( {"unchecked"}) + public void testBatchFetch2() { + Session s = openSession(); + s.beginTransaction(); + int size = 32+14; + for ( int i = 0; i < size; i++ ) { + s.save( new BatchLoadableEntity( i ) ); + } + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + // load them all as proxies + for ( int i = 0; i < size; i++ ) { + BatchLoadableEntity entity = (BatchLoadableEntity) s.load( BatchLoadableEntity.class, i ); + assertFalse( Hibernate.isInitialized( entity ) ); + } + sessionFactory().getStatistics().clear(); + // now start initializing them... + for ( int i = 0; i < size; i++ ) { + BatchLoadableEntity entity = (BatchLoadableEntity) s.load( BatchLoadableEntity.class, i ); + Hibernate.initialize( entity ); + assertTrue( Hibernate.isInitialized( entity ) ); + } + // so at this point, all entities are initialized. see how many fetches were performed. + final int expectedFetchCount; + if ( sessionFactory().getSettings().getBatchFetchStyle() == BatchFetchStyle.LEGACY ) { + expectedFetchCount = 3; // (32 + 10 + 4) + } + else if ( sessionFactory().getSettings().getBatchFetchStyle() == BatchFetchStyle.DYNAMIC ) { + expectedFetchCount = 2; // (32 + 14) : because we limited batch-size to 32 + } + else { + // PADDED + expectedFetchCount = 2; // (32 + 16*) with the 16 being padded + } + assertEquals( expectedFetchCount, sessionFactory().getStatistics().getEntityStatistics( BatchLoadableEntity.class.getName() ).getFetchCount() ); + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + s.createQuery( "delete BatchLoadableEntity" ).executeUpdate(); + s.getTransaction().commit(); + s.close(); + } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/batchfetch/BatchLoadableEntity.java b/hibernate-core/src/test/java/org/hibernate/test/batchfetch/BatchLoadableEntity.java new file mode 100644 index 0000000000..2d15f95b30 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/batchfetch/BatchLoadableEntity.java @@ -0,0 +1,64 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.batchfetch; + +import javax.persistence.Entity; +import javax.persistence.Id; + +import org.hibernate.annotations.BatchSize; + +/** + * @author Steve Ebersole + */ +@Entity +@BatchSize( size = 32 ) +public class BatchLoadableEntity { + private Integer id; + private String name; + + public BatchLoadableEntity() { + } + + public BatchLoadableEntity(int id) { + this.id = id; + this.name = "Entity #" + id; + } + + @Id + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/cfg/persister/GoofyPersisterClassProvider.java b/hibernate-core/src/test/java/org/hibernate/test/cfg/persister/GoofyPersisterClassProvider.java index 771b7cd3a9..650eb0bc24 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/cfg/persister/GoofyPersisterClassProvider.java +++ b/hibernate-core/src/test/java/org/hibernate/test/cfg/persister/GoofyPersisterClassProvider.java @@ -800,5 +800,10 @@ public class GoofyPersisterClassProvider implements PersisterClassResolver { public Object getElementByIndex(Serializable key, Object index, SessionImplementor session, Object owner) { return null; } + + @Override + public int getBatchSize() { + return 0; + } } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/enums/TestEnumTypeSerialization.java b/hibernate-core/src/test/java/org/hibernate/test/enums/TestEnumTypeSerialization.java new file mode 100644 index 0000000000..446dd73b7a --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/enums/TestEnumTypeSerialization.java @@ -0,0 +1,66 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.enums; + +import java.util.Properties; + +import org.hibernate.internal.util.SerializationHelper; +import org.hibernate.type.EnumType; +import org.hibernate.usertype.DynamicParameterizedType; + +import org.junit.Test; + +import org.hibernate.testing.junit4.BaseUnitTestCase; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * @author Steve Ebersole + */ +public class TestEnumTypeSerialization extends BaseUnitTestCase { + @Test + public void testSerializability() { + { + // test ordinal mapping + EnumType enumType = new EnumType(); + Properties properties = new Properties(); + properties.put( EnumType.ENUM, UnspecifiedEnumTypeEntity.E1.class.getName() ); + enumType.setParameterValues( properties ); + assertTrue( enumType.isOrdinal() ); + SerializationHelper.clone( enumType ); + } + + { + // test named mapping + EnumType enumType = new EnumType(); + Properties properties = new Properties(); + properties.put( EnumType.ENUM, UnspecifiedEnumTypeEntity.E1.class.getName() ); + properties.put( EnumType.NAMED, "true" ); + enumType.setParameterValues( properties ); + assertFalse( enumType.isOrdinal() ); + SerializationHelper.clone( enumType ); + } + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/enums/UnspecifiedEnumTypeEntity.java b/hibernate-core/src/test/java/org/hibernate/test/enums/UnspecifiedEnumTypeEntity.java new file mode 100644 index 0000000000..f69488f125 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/enums/UnspecifiedEnumTypeEntity.java @@ -0,0 +1,82 @@ +package org.hibernate.test.enums; + +import java.io.Serializable; + +/** + * @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com) + */ +public class UnspecifiedEnumTypeEntity implements Serializable { + public static enum E1 { X, Y } + public static enum E2 { A, B } + + private Long id; + + private E1 enum1; + + private E2 enum2; + + public UnspecifiedEnumTypeEntity() { + } + + public UnspecifiedEnumTypeEntity(E1 enum1, E2 enum2) { + this.enum1 = enum1; + this.enum2 = enum2; + } + + public UnspecifiedEnumTypeEntity(E1 enum1, E2 enum2, Long id) { + this.enum1 = enum1; + this.enum2 = enum2; + this.id = id; + } + + @Override + public boolean equals(Object o) { + if ( this == o ) return true; + if ( ! ( o instanceof UnspecifiedEnumTypeEntity ) ) return false; + + UnspecifiedEnumTypeEntity that = (UnspecifiedEnumTypeEntity) o; + + if ( enum1 != that.enum1 ) return false; + if ( enum2 != that.enum2 ) return false; + if ( id != null ? !id.equals( that.id ) : that.id != null ) return false; + + return true; + } + + @Override + public int hashCode() { + int result = id != null ? id.hashCode() : 0; + result = 31 * result + ( enum1 != null ? enum1.hashCode() : 0 ); + result = 31 * result + ( enum2 != null ? 
enum2.hashCode() : 0 ); + return result; + } + + @Override + public String toString() { + return "UnspecifiedEnumTypeEntity(id = " + id + ", enum1 = " + enum1 + ", enum2 = " + enum2 + ")"; + } + + public E1 getEnum1() { + return enum1; + } + + public void setEnum1(E1 enum1) { + this.enum1 = enum1; + } + + public E2 getEnum2() { + return enum2; + } + + public void setEnum2(E2 enum2) { + this.enum2 = enum2; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/enums/UnspecifiedEnumTypeTest.java b/hibernate-core/src/test/java/org/hibernate/test/enums/UnspecifiedEnumTypeTest.java new file mode 100644 index 0000000000..84a9f1e000 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/enums/UnspecifiedEnumTypeTest.java @@ -0,0 +1,97 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.enums; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.hibernate.Session; +import org.hibernate.cfg.Configuration; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.H2Dialect; + +import org.hibernate.testing.RequiresDialect; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; + +/** + * @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com) + */ +@TestForIssue( jiraKey = "HHH-7780" ) +@RequiresDialect( value = H2Dialect.class ) +public class UnspecifiedEnumTypeTest extends BaseCoreFunctionalTestCase { + @Override + protected String[] getMappings() { + return new String[] { "enums/mappings.hbm.xml" }; + } + + @Override + protected void configure(Configuration configuration) { + super.configure( configuration ); + configuration.setProperty( Environment.HBM2DDL_AUTO, "" ); + } + + @Before + public void prepareTable() { + Session session = openSession(); + dropTable( session ); + createTable( session ); + session.close(); + } + + public void dropTable(Session session) { + executeUpdateSafety( session, "drop table ENUM_ENTITY if exists" ); + } + + private void createTable(Session session) { + executeUpdateSafety( + session, + "create table ENUM_ENTITY (ID bigint not null, enum1 varchar(255), enum2 integer, primary key (ID))" + ); + } + + @After + public void dropTable() { + dropTable( session ); + } + + @Test + public void testEnumTypeDiscovery() { + Session session = openSession(); + session.beginTransaction(); + UnspecifiedEnumTypeEntity entity = new UnspecifiedEnumTypeEntity( UnspecifiedEnumTypeEntity.E1.X, UnspecifiedEnumTypeEntity.E2.A ); + session.persist( entity ); + session.getTransaction().commit(); + session.close(); + } + + private void executeUpdateSafety(Session session, String query) { + try { + session.createSQLQuery( query ).executeUpdate(); + } + catch ( Exception e ) { + } + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/DetachedMultipleCollectionChangeTest.java b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/DetachedMultipleCollectionChangeTest.java new file mode 100644 index 0000000000..ec2e4ea75d --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/DetachedMultipleCollectionChangeTest.java @@ -0,0 +1,271 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2011, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.event.collection.detached; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertSame; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; + +import org.hibernate.Session; +import org.hibernate.event.spi.AbstractCollectionEvent; +import org.hibernate.event.spi.PostCollectionRecreateEvent; +import org.hibernate.event.spi.PreCollectionRemoveEvent; +import org.hibernate.event.spi.PreCollectionUpdateEvent; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.junit.Test; + +/** + * Test HHH-6361: Collection events may contain wrong stored snapshot after + * merging a detached entity into the persistence context. + * + * @author Erik-Berndt Scheper + */ +@TestForIssue( jiraKey = "HHH-6361" ) +public class DetachedMultipleCollectionChangeTest extends BaseCoreFunctionalTestCase { + + @Override + public String[] getMappings() { + return new String[] { "event/collection/detached/MultipleCollectionBagMapping.hbm.xml" }; + } + + @Override + protected void cleanupTest() { + Session s = null; + s = openSession(); + s.beginTransaction(); + s.createQuery("delete MultipleCollectionRefEntity1").executeUpdate(); + s.createQuery("delete MultipleCollectionRefEntity2").executeUpdate(); + s.createQuery("delete MultipleCollectionEntity").executeUpdate(); + s.getTransaction().commit(); + s.close(); + } + + @Test + public void testMergeMultipleCollectionChangeEvents() { + MultipleCollectionListeners listeners = new MultipleCollectionListeners( + sessionFactory()); + listeners.clear(); + int eventCount = 0; + + List oldRefentities1 + = new ArrayList(); + List oldRefentities2 + = new ArrayList(); + + Session s = openSession(); + s.beginTransaction(); + + MultipleCollectionEntity mce = new MultipleCollectionEntity(); + mce.setText("MultipleCollectionEntity-1"); + + s.save(mce); + s.getTransaction().commit(); + + checkListener(listeners, listeners.getPreCollectionRecreateListener(), + mce, oldRefentities1, eventCount++); + checkListener(listeners, listeners.getPostCollectionRecreateListener(), + mce, oldRefentities1, eventCount++); + checkListener(listeners, listeners.getPreCollectionRecreateListener(), + mce, oldRefentities2, eventCount++); + checkListener(listeners, listeners.getPostCollectionRecreateListener(), + mce, oldRefentities2, eventCount++); + checkEventCount(listeners, eventCount); + + s.close(); + + Long mceId1 = mce.getId(); + assertNotNull(mceId1); + + // add new entities to both collections + + MultipleCollectionEntity prevMce = mce.deepCopy(); + oldRefentities1 = prevMce.getRefEntities1(); + oldRefentities2 = prevMce.getRefEntities2(); + + listeners.clear(); + eventCount = 0; + + s = openSession(); + s.beginTransaction(); + + MultipleCollectionRefEntity1 re1_1 = new MultipleCollectionRefEntity1(); + re1_1.setText("MultipleCollectionRefEntity1-1"); + re1_1.setMultipleCollectionEntity(mce); + + MultipleCollectionRefEntity1 re1_2 = new MultipleCollectionRefEntity1(); + re1_2.setText("MultipleCollectionRefEntity1-2"); + re1_2.setMultipleCollectionEntity(mce); + + mce.addRefEntity1(re1_1); + mce.addRefEntity1(re1_2); + + mce = (MultipleCollectionEntity) s.merge(mce); + + s.getTransaction().commit(); + s.close(); + + checkListener(listeners, listeners.getInitializeCollectionListener(), + mce, null, eventCount++); +
checkListener(listeners, listeners.getPreCollectionUpdateListener(), + mce, oldRefentities1, eventCount++); + checkListener(listeners, listeners.getPostCollectionUpdateListener(), + mce, mce.getRefEntities1(), eventCount++); + + s = openSession(); + s.beginTransaction(); + + MultipleCollectionRefEntity2 re2_1 = new MultipleCollectionRefEntity2(); + re2_1.setText("MultipleCollectionRefEntity2-1"); + re2_1.setMultipleCollectionEntity(mce); + + MultipleCollectionRefEntity2 re2_2 = new MultipleCollectionRefEntity2(); + re2_2.setText("MultipleCollectionRefEntity2-2"); + re2_2.setMultipleCollectionEntity(mce); + + mce.addRefEntity2(re2_1); + mce.addRefEntity2(re2_2); + + mce = (MultipleCollectionEntity) s.merge(mce); + + s.getTransaction().commit(); + + checkListener(listeners, listeners.getInitializeCollectionListener(), + mce, null, eventCount++); + checkListener(listeners, listeners.getPreCollectionUpdateListener(), + mce, oldRefentities2, eventCount++); + checkListener(listeners, listeners.getPostCollectionUpdateListener(), + mce, mce.getRefEntities2(), eventCount++); + checkEventCount(listeners, eventCount); + + s.close(); + + for (MultipleCollectionRefEntity1 refEnt1 : mce.getRefEntities1()) { + assertNotNull(refEnt1.getId()); + } + for (MultipleCollectionRefEntity2 refEnt2 : mce.getRefEntities2()) { + assertNotNull(refEnt2.getId()); + } + + // remove and add entities in both collections + + prevMce = mce.deepCopy(); + oldRefentities1 = prevMce.getRefEntities1(); + oldRefentities2 = prevMce.getRefEntities2(); + + listeners.clear(); + eventCount = 0; + + s = openSession(); + s.beginTransaction(); + + assertEquals(2, mce.getRefEntities1().size()); + assertEquals(2, mce.getRefEntities2().size()); + + mce.removeRefEntity1(re1_2); + + MultipleCollectionRefEntity1 re1_3 = new MultipleCollectionRefEntity1(); + re1_3.setText("MultipleCollectionRefEntity1-3"); + re1_3.setMultipleCollectionEntity(mce); + mce.addRefEntity1(re1_3); + + mce = (MultipleCollectionEntity) s.merge(mce); + + s.getTransaction().commit(); + s.close(); + + checkListener(listeners, listeners.getInitializeCollectionListener(), + mce, null, eventCount++); + checkListener(listeners, listeners.getPreCollectionUpdateListener(), + mce, oldRefentities1, eventCount++); + checkListener(listeners, listeners.getPostCollectionUpdateListener(), + mce, mce.getRefEntities1(), eventCount++); + + s = openSession(); + s.beginTransaction(); + + mce.removeRefEntity2(re2_2); + + MultipleCollectionRefEntity2 re2_3 = new MultipleCollectionRefEntity2(); + re2_3.setText("MultipleCollectionRefEntity2-3"); + re2_3.setMultipleCollectionEntity(mce); + mce.addRefEntity2(re2_3); + + mce = (MultipleCollectionEntity) s.merge(mce); + + s.getTransaction().commit(); + + checkListener(listeners, listeners.getInitializeCollectionListener(), + mce, null, eventCount++); + checkListener(listeners, listeners.getPreCollectionUpdateListener(), + mce, oldRefentities2, eventCount++); + checkListener(listeners, listeners.getPostCollectionUpdateListener(), + mce, mce.getRefEntities2(), eventCount++); + + checkEventCount(listeners, eventCount); + + s.close(); + } + + protected void checkListener( + MultipleCollectionListeners listeners, + MultipleCollectionListeners.Listener listenerExpected, + org.hibernate.test.event.collection.Entity ownerExpected, + List expectedCollectionEntrySnapshot, + int index) { + AbstractCollectionEvent event = (AbstractCollectionEvent) listeners + .getEvents().get(index); + + assertSame(listenerExpected, listeners.getListenersCalled().get(index)); 
+ assertEquals(ownerExpected, event.getAffectedOwnerOrNull()); + assertEquals(ownerExpected.getId(), event.getAffectedOwnerIdOrNull()); + assertEquals(ownerExpected.getClass().getName(), + event.getAffectedOwnerEntityName()); + + if (event instanceof PreCollectionUpdateEvent) { + Serializable snapshot = listeners.getSnapshots().get(index); + assertEquals(expectedCollectionEntrySnapshot, snapshot); + } + if (event instanceof PreCollectionRemoveEvent) { + Serializable snapshot = listeners.getSnapshots().get(index); + assertEquals(expectedCollectionEntrySnapshot, snapshot); + } + if (event instanceof PostCollectionRecreateEvent) { + Serializable snapshot = listeners.getSnapshots().get(index); + assertEquals(expectedCollectionEntrySnapshot, snapshot); + } + + } + + private void checkEventCount(MultipleCollectionListeners listeners, + int nEventsExpected) { + assertEquals(nEventsExpected, listeners.getListenersCalled().size()); + assertEquals(nEventsExpected, listeners.getEvents().size()); + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionEntity.java b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionEntity.java new file mode 100644 index 0000000000..24e055d1fe --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionEntity.java @@ -0,0 +1,168 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2011, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.event.collection.detached; + +import java.util.ArrayList; +import java.util.List; + +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.OneToMany; + +@Entity +public class MultipleCollectionEntity implements org.hibernate.test.event.collection.Entity { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "ID", length = 10) + private Long id; + + @Column(name = "TEXT", length = 50, nullable = false) + private String text; + + @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, orphanRemoval = true) + @JoinColumn(name = "MCE_ID", nullable = false) + private List refEntities1 = new ArrayList(); + + @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, orphanRemoval = true) + @JoinColumn(name = "MCE_ID", nullable = false) + private List refEntities2 = new ArrayList(); + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getText() { + return text; + } + + public void setText(String text) { + this.text = text; + } + + public List getRefEntities1() { + return refEntities1; + } + + public void setRefEntities1(List refEntities1) { + this.refEntities1 = refEntities1; + } + + public void addRefEntity1(MultipleCollectionRefEntity1 refEntity1) { + refEntities1.add(refEntity1); + } + + public void removeRefEntity1(MultipleCollectionRefEntity1 refEntity1) { + refEntities1.remove(refEntity1); + } + + public List getRefEntities2() { + return refEntities2; + } + + public void setRefEntities2(List refEntities2) { + this.refEntities2 = refEntities2; + } + + public void addRefEntity2(MultipleCollectionRefEntity2 refEntity2) { + refEntities2.add(refEntity2); + } + + public void removeRefEntity2(MultipleCollectionRefEntity2 refEntity2) { + refEntities2.remove(refEntity2); + } + + @Override + public String toString() { + return "MultipleCollectionEntity [id=" + id + ", text=" + text + + ", refEntities1=" + refEntities1 + ", refEntities2=" + + refEntities2 + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((text == null) ? 0 : text.hashCode()); + result = prime * result + + ((refEntities1 == null) ? 0 : refEntities1.hashCode()); + result = prime * result + + ((refEntities2 == null) ? 
0 : refEntities2.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + MultipleCollectionEntity other = (MultipleCollectionEntity) obj; + if (text == null) { + if (other.text != null) + return false; + } else if (!text.equals(other.text)) + return false; + if (refEntities1 == null) { + if (other.refEntities1 != null) + return false; + } else if (!refEntities1.equals(other.refEntities1)) + return false; + if (refEntities2 == null) { + if (other.refEntities2 != null) + return false; + } else if (!refEntities2.equals(other.refEntities2)) + return false; + return true; + } + + + public MultipleCollectionEntity deepCopy() { + MultipleCollectionEntity clone = new MultipleCollectionEntity(); + clone.setText(this.text); + clone.setId(this.id); + + for (MultipleCollectionRefEntity1 refEntity1 : refEntities1) { + clone.addRefEntity1(refEntity1.deepCopy(clone)); + } + for (MultipleCollectionRefEntity2 refEntity2 : refEntities2) { + clone.addRefEntity2(refEntity2.deepCopy(clone)); + } + return clone; + } + + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionListeners.java b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionListeners.java new file mode 100644 index 0000000000..333da328dc --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionListeners.java @@ -0,0 +1,265 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2011, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.event.collection.detached; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; + +import org.hibernate.SessionFactory; +import org.hibernate.engine.spi.CollectionEntry; +import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.event.internal.DefaultInitializeCollectionEventListener; +import org.hibernate.event.service.spi.EventListenerRegistry; +import org.hibernate.event.spi.AbstractCollectionEvent; +import org.hibernate.event.spi.EventType; +import org.hibernate.event.spi.InitializeCollectionEvent; +import org.hibernate.event.spi.PostCollectionRecreateEvent; +import org.hibernate.event.spi.PostCollectionRecreateEventListener; +import org.hibernate.event.spi.PostCollectionRemoveEvent; +import org.hibernate.event.spi.PostCollectionRemoveEventListener; +import org.hibernate.event.spi.PostCollectionUpdateEvent; +import org.hibernate.event.spi.PostCollectionUpdateEventListener; +import org.hibernate.event.spi.PreCollectionRecreateEvent; +import org.hibernate.event.spi.PreCollectionRecreateEventListener; +import org.hibernate.event.spi.PreCollectionRemoveEvent; +import org.hibernate.event.spi.PreCollectionRemoveEventListener; +import org.hibernate.event.spi.PreCollectionUpdateEvent; +import org.hibernate.event.spi.PreCollectionUpdateEventListener; +import org.jboss.logging.Logger; + +/** + * Support listeners for Test HHH-6361: Collection events may contain wrong + * stored snapshot after merging a detached entity into the persistence context. + * + * @author Erik-Berndt Scheper + */ +public class MultipleCollectionListeners { + + private final Logger log = Logger.getLogger(MultipleCollectionListeners.class); + + public interface Listener extends Serializable { + void addEvent(AbstractCollectionEvent event, Listener listener); + } + + public static abstract class AbstractListener implements Listener { + + private final MultipleCollectionListeners listeners; + + protected AbstractListener(MultipleCollectionListeners listeners) { + this.listeners = listeners; + } + + public void addEvent(AbstractCollectionEvent event, Listener listener) { + listeners.addEvent(event, listener); + } + } + + public static class InitializeCollectionListener extends + DefaultInitializeCollectionEventListener implements Listener { + private final MultipleCollectionListeners listeners; + + private InitializeCollectionListener( + MultipleCollectionListeners listeners) { + this.listeners = listeners; + } + + public void onInitializeCollection(InitializeCollectionEvent event) { + super.onInitializeCollection(event); + addEvent(event, this); + } + + public void addEvent(AbstractCollectionEvent event, Listener listener) { + listeners.addEvent(event, listener); + } + } + + public static class PreCollectionRecreateListener extends AbstractListener + implements PreCollectionRecreateEventListener { + private PreCollectionRecreateListener( + MultipleCollectionListeners listeners) { + super(listeners); + } + + public void onPreRecreateCollection(PreCollectionRecreateEvent event) { + addEvent(event, this); + } + } + + public static class PostCollectionRecreateListener extends AbstractListener + implements PostCollectionRecreateEventListener { + private PostCollectionRecreateListener( + MultipleCollectionListeners listeners) { + super(listeners); + } + + public void onPostRecreateCollection(PostCollectionRecreateEvent event) { + addEvent(event, this); + } + } + + public static
class PreCollectionRemoveListener extends AbstractListener + implements PreCollectionRemoveEventListener { + private PreCollectionRemoveListener( + MultipleCollectionListeners listeners) { + super(listeners); + } + + public void onPreRemoveCollection(PreCollectionRemoveEvent event) { + addEvent(event, this); + } + } + + public static class PostCollectionRemoveListener extends AbstractListener + implements PostCollectionRemoveEventListener { + private PostCollectionRemoveListener( + MultipleCollectionListeners listeners) { + super(listeners); + } + + public void onPostRemoveCollection(PostCollectionRemoveEvent event) { + addEvent(event, this); + } + } + + public static class PreCollectionUpdateListener extends AbstractListener + implements PreCollectionUpdateEventListener { + private PreCollectionUpdateListener( + MultipleCollectionListeners listeners) { + super(listeners); + } + + public void onPreUpdateCollection(PreCollectionUpdateEvent event) { + addEvent(event, this); + } + } + + public static class PostCollectionUpdateListener extends AbstractListener + implements PostCollectionUpdateEventListener { + private PostCollectionUpdateListener( + MultipleCollectionListeners listeners) { + super(listeners); + } + + public void onPostUpdateCollection(PostCollectionUpdateEvent event) { + addEvent(event, this); + } + } + + private final PreCollectionRecreateListener preCollectionRecreateListener; + private final InitializeCollectionListener initializeCollectionListener; + private final PreCollectionRemoveListener preCollectionRemoveListener; + private final PreCollectionUpdateListener preCollectionUpdateListener; + private final PostCollectionRecreateListener postCollectionRecreateListener; + private final PostCollectionRemoveListener postCollectionRemoveListener; + private final PostCollectionUpdateListener postCollectionUpdateListener; + + private List listenersCalled = new ArrayList(); + private List events = new ArrayList(); + private List snapshots = new ArrayList(); + + public MultipleCollectionListeners(SessionFactory sf) { + preCollectionRecreateListener = new PreCollectionRecreateListener(this); + initializeCollectionListener = new InitializeCollectionListener(this); + preCollectionRemoveListener = new PreCollectionRemoveListener(this); + preCollectionUpdateListener = new PreCollectionUpdateListener(this); + postCollectionRecreateListener = new PostCollectionRecreateListener( + this); + postCollectionRemoveListener = new PostCollectionRemoveListener(this); + postCollectionUpdateListener = new PostCollectionUpdateListener(this); + EventListenerRegistry registry = ( (SessionFactoryImplementor) sf ).getServiceRegistry().getService( EventListenerRegistry.class ); + registry.setListeners( EventType.INIT_COLLECTION, initializeCollectionListener ); + + registry.setListeners( EventType.PRE_COLLECTION_RECREATE, preCollectionRecreateListener ); + registry.setListeners( EventType.POST_COLLECTION_RECREATE, postCollectionRecreateListener ); + + registry.setListeners( EventType.PRE_COLLECTION_REMOVE, preCollectionRemoveListener ); + registry.setListeners( EventType.POST_COLLECTION_REMOVE, postCollectionRemoveListener ); + + registry.setListeners( EventType.PRE_COLLECTION_UPDATE, preCollectionUpdateListener ); + registry.setListeners( EventType.POST_COLLECTION_UPDATE, postCollectionUpdateListener ); + } + + public void addEvent(AbstractCollectionEvent event, Listener listener) { + + CollectionEntry collectionEntry = event.getSession() + .getPersistenceContext() + 
.getCollectionEntry(event.getCollection()); + + Serializable snapshot = collectionEntry.getSnapshot(); + + log.debug("add Event: " + event.getClass() + "; listener = " + + listener.getClass() + "; snapshot = " + snapshot); + + listenersCalled.add(listener); + events.add(event); + snapshots.add(snapshot); + } + + public List getListenersCalled() { + return listenersCalled; + } + + public List getEvents() { + return events; + } + + public List getSnapshots() { + return snapshots; + } + + public void clear() { + listenersCalled.clear(); + events.clear(); + snapshots.clear(); + } + + public PreCollectionRecreateListener getPreCollectionRecreateListener() { + return preCollectionRecreateListener; + } + + public InitializeCollectionListener getInitializeCollectionListener() { + return initializeCollectionListener; + } + + public PreCollectionRemoveListener getPreCollectionRemoveListener() { + return preCollectionRemoveListener; + } + + public PreCollectionUpdateListener getPreCollectionUpdateListener() { + return preCollectionUpdateListener; + } + + public PostCollectionRecreateListener getPostCollectionRecreateListener() { + return postCollectionRecreateListener; + } + + public PostCollectionRemoveListener getPostCollectionRemoveListener() { + return postCollectionRemoveListener; + } + + public PostCollectionUpdateListener getPostCollectionUpdateListener() { + return postCollectionUpdateListener; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionRefEntity1.java b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionRefEntity1.java new file mode 100644 index 0000000000..a7e82862fd --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionRefEntity1.java @@ -0,0 +1,127 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2011, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. 
+ * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.event.collection.detached; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; + +@Entity +public class MultipleCollectionRefEntity1 implements org.hibernate.test.event.collection.Entity { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "ID", length = 10) + private Long id; + + @Column(name = "TEXT", length = 50, nullable = false) + private String text; + + @ManyToOne + @JoinColumn(name = "MCE_ID", nullable = false, insertable = false, updatable = false) + @org.hibernate.annotations.ForeignKey(name = "FK_RE1_MCE") + private MultipleCollectionEntity multipleCollectionEntity; + + @Column(name = "MCE_ID", insertable = false, updatable = false) + private Long multipleCollectionEntityId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getText() { + return text; + } + + public void setText(String text) { + this.text = text; + } + + public MultipleCollectionEntity getMultipleCollectionEntity() { + return multipleCollectionEntity; + } + + public void setMultipleCollectionEntity( + MultipleCollectionEntity multipleCollectionEntity) { + this.multipleCollectionEntity = multipleCollectionEntity; + } + + public Long getMultipleCollectionEntityId() { + return multipleCollectionEntityId; + } + + public void setMultipleCollectionEntityId(Long multipleCollectionEntityId) { + this.multipleCollectionEntityId = multipleCollectionEntityId; + } + + @Override + public String toString() { + return "MultipleCollectionRefEntity1 [id=" + id + ", text=" + text + + ", multipleCollectionEntityId=" + multipleCollectionEntityId + + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((text == null) ? 0 : text.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + MultipleCollectionRefEntity1 other = (MultipleCollectionRefEntity1) obj; + if (text == null) { + if (other.text != null) + return false; + } else if (!text.equals(other.text)) + return false; + return true; + } + + public MultipleCollectionRefEntity1 deepCopy(MultipleCollectionEntity newRef) { + MultipleCollectionRefEntity1 clone = new MultipleCollectionRefEntity1(); + clone.setText(this.text); + clone.setId(this.id); + clone.setMultipleCollectionEntity(newRef); + clone.setMultipleCollectionEntityId(newRef.getId()); + return clone; + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionRefEntity2.java b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionRefEntity2.java new file mode 100644 index 0000000000..eafb2ab82f --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/event/collection/detached/MultipleCollectionRefEntity2.java @@ -0,0 +1,128 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2011, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. 
All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.event.collection.detached; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; + +@Entity +public class MultipleCollectionRefEntity2 implements org.hibernate.test.event.collection.Entity { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "ID", length = 10) + private Long id; + + @Column(name = "TEXT", length = 50, nullable = false) + private String text; + + @ManyToOne + @JoinColumn(name = "MCE_ID", nullable = false, insertable = false, updatable = false) + @org.hibernate.annotations.ForeignKey(name = "FK_RE2_MCE") + private MultipleCollectionEntity multipleCollectionEntity; + + @Column(name = "MCE_ID", insertable = false, updatable = false) + private Long multipleCollectionEntityId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getText() { + return text; + } + + public void setText(String text) { + this.text = text; + } + + public MultipleCollectionEntity getMultipleCollectionEntity() { + return multipleCollectionEntity; + } + + public void setMultipleCollectionEntity( + MultipleCollectionEntity multipleCollectionEntity) { + this.multipleCollectionEntity = multipleCollectionEntity; + } + + public Long getMultipleCollectionEntityId() { + return multipleCollectionEntityId; + } + + public void setMultipleCollectionEntityId(Long multipleCollectionEntityId) { + this.multipleCollectionEntityId = multipleCollectionEntityId; + } + + @Override + public String toString() { + return "MultipleCollectionRefEntity2 [id=" + id + ", text=" + text + + ", multipleCollectionEntityId=" + multipleCollectionEntityId + + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((text == null) ? 
0 : text.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + MultipleCollectionRefEntity2 other = (MultipleCollectionRefEntity2) obj; + if (text == null) { + if (other.text != null) + return false; + } else if (!text.equals(other.text)) + return false; + return true; + } + + + public MultipleCollectionRefEntity2 deepCopy(MultipleCollectionEntity newRef) { + MultipleCollectionRefEntity2 clone = new MultipleCollectionRefEntity2(); + clone.setText(this.text); + clone.setId(this.id); + clone.setMultipleCollectionEntity(newRef); + clone.setMultipleCollectionEntityId(newRef.getId()); + return clone; + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/filter/hql/JoinedFilteredBulkManipulationTest.java b/hibernate-core/src/test/java/org/hibernate/test/filter/hql/JoinedFilteredBulkManipulationTest.java index 4e4a2e4f9b..e1d8c0b709 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/filter/hql/JoinedFilteredBulkManipulationTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/filter/hql/JoinedFilteredBulkManipulationTest.java @@ -24,6 +24,8 @@ package org.hibernate.test.filter.hql; import java.util.Date; +import org.hibernate.dialect.CUBRIDDialect; +import org.hibernate.testing.SkipForDialect; import org.junit.Test; import org.hibernate.Session; @@ -36,6 +38,11 @@ import static org.junit.Assert.assertEquals; * @author Steve Ebersole */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class JoinedFilteredBulkManipulationTest extends BaseCoreFunctionalTestCase { @Override public String[] getMappings() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/hql/ASTParserLoadingTest.java b/hibernate-core/src/test/java/org/hibernate/test/hql/ASTParserLoadingTest.java index 847930dbbd..8318149d98 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/hql/ASTParserLoadingTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/hql/ASTParserLoadingTest.java @@ -66,6 +66,7 @@ import org.hibernate.dialect.Sybase11Dialect; import org.hibernate.dialect.SybaseASE15Dialect; import org.hibernate.dialect.SybaseAnywhereDialect; import org.hibernate.dialect.SybaseDialect; +import org.hibernate.dialect.CUBRIDDialect; import org.hibernate.hql.internal.ast.ASTQueryTranslatorFactory; import org.hibernate.internal.util.StringHelper; import org.hibernate.persister.entity.DiscriminatorType; @@ -105,6 +106,11 @@ import org.junit.Test; * @author Steve */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class ASTParserLoadingTest extends BaseCoreFunctionalTestCase { private static final Logger log = Logger.getLogger( ASTParserLoadingTest.class ); @@ -1819,6 +1825,13 @@ public class ASTParserLoadingTest extends BaseCoreFunctionalTestCase { } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID does not support temporary tables."
+ + " This test somehow calls MultiTableDeleteExecutor which raises an" + + " exception saying 'cannot doAfterTransactionCompletion multi-table" + + " deletes using dialect not supporting temp tables'." + ) public void testParameterMixing() { Session s = openSession(); Transaction t = s.beginTransaction(); diff --git a/hibernate-core/src/test/java/org/hibernate/test/hql/BulkManipulationTest.java b/hibernate-core/src/test/java/org/hibernate/test/hql/BulkManipulationTest.java index fe98879562..49f4849d01 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/hql/BulkManipulationTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/hql/BulkManipulationTest.java @@ -28,6 +28,8 @@ import java.util.Date; import java.util.List; import junit.framework.AssertionFailedError; +import org.hibernate.dialect.CUBRIDDialect; +import org.hibernate.testing.SkipForDialect; import org.junit.Test; import org.hibernate.QueryException; @@ -118,6 +120,11 @@ public class BulkManipulationTest extends BaseCoreFunctionalTestCase { } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testTempTableGenerationIsolation() throws Throwable{ Session s = openSession(); s.beginTransaction(); @@ -526,6 +533,11 @@ public class BulkManipulationTest extends BaseCoreFunctionalTestCase { } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testInsertWithSelectListUsingJoins() { // this is just checking parsing and syntax... Session s = openSession(); @@ -729,6 +741,11 @@ public class BulkManipulationTest extends BaseCoreFunctionalTestCase { } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testUpdateOnManyToOne() { Session s = openSession(); Transaction t = s.beginTransaction(); @@ -1160,6 +1177,11 @@ public class BulkManipulationTest extends BaseCoreFunctionalTestCase { } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables.
This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testDeleteWithMetadataWhereFragments() throws Throwable { Session s = openSession(); Transaction t = s.beginTransaction(); diff --git a/hibernate-core/src/test/java/org/hibernate/test/hql/ScrollableCollectionFetchingTest.java b/hibernate-core/src/test/java/org/hibernate/test/hql/ScrollableCollectionFetchingTest.java index aae68ae40d..ebba1a8dad 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/hql/ScrollableCollectionFetchingTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/hql/ScrollableCollectionFetchingTest.java @@ -23,6 +23,7 @@ */ package org.hibernate.test.hql; +import org.hibernate.dialect.CUBRIDDialect; import org.junit.Test; import org.hibernate.HibernateException; @@ -142,6 +143,11 @@ public class ScrollableCollectionFetchingTest extends BaseCoreFunctionalTestCase } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testScrollingJoinFetchesSingleRowResultSet() { Session s = openSession(); Transaction txn = s.beginTransaction(); @@ -295,6 +301,11 @@ public class ScrollableCollectionFetchingTest extends BaseCoreFunctionalTestCase } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testScrollingJoinFetchesReverse() { TestData data = new TestData(); data.prepare(); @@ -324,6 +335,11 @@ public class ScrollableCollectionFetchingTest extends BaseCoreFunctionalTestCase } @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testScrollingJoinFetchesPositioning() { TestData data = new TestData(); data.prepare(); diff --git a/hibernate-core/src/test/java/org/hibernate/test/hql/TupleSupportTest.java b/hibernate-core/src/test/java/org/hibernate/test/hql/TupleSupportTest.java new file mode 100644 index 0000000000..5e962a1638 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/hql/TupleSupportTest.java @@ -0,0 +1,130 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2012, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details.
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.hql; + +import javax.persistence.Embeddable; +import javax.persistence.Embedded; +import javax.persistence.Entity; +import javax.persistence.Id; + +import java.util.Collections; + +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.hibernate.cfg.AvailableSettings; +import org.hibernate.cfg.Configuration; +import org.hibernate.dialect.H2Dialect; +import org.hibernate.engine.query.spi.HQLQueryPlan; +import org.hibernate.engine.spi.SessionFactoryImplementor; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.hibernate.testing.junit4.BaseUnitTestCase; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * @author Steve Ebersole + */ +@TestForIssue( jiraKey = "HHH-7757" ) +public class TupleSupportTest extends BaseUnitTestCase { + @Entity( name = "TheEntity" ) + public static class TheEntity { + @Id + private Long id; + @Embedded + private TheComposite compositeValue; + } + + @Embeddable + public static class TheComposite { + private String thing1; + private String thing2; + + public TheComposite() { + } + + public TheComposite(String thing1, String thing2) { + this.thing1 = thing1; + this.thing2 = thing2; + } + } + + private SessionFactory sessionFactory; + + @Before + public void buildSessionFactory() { + Configuration cfg = new Configuration() + .addAnnotatedClass( TheEntity.class ); + cfg.getProperties().put( AvailableSettings.DIALECT, NoTupleSupportDialect.class.getName() ); + cfg.getProperties().put( AvailableSettings.HBM2DDL_AUTO, "create-drop" ); + sessionFactory = cfg.buildSessionFactory(); + } + + @After + public void releaseSessionFactory() { + sessionFactory.close(); + } + + @Test + public void testImplicitTupleNotEquals() { + final String hql = "from TheEntity e where e.compositeValue <> :p1"; + HQLQueryPlan queryPlan = ( (SessionFactoryImplementor) sessionFactory ).getQueryPlanCache() + .getHQLQueryPlan( hql, false, Collections.emptyMap() ); + + assertEquals( 1, queryPlan.getSqlStrings().length ); + System.out.println( " SQL : " + queryPlan.getSqlStrings()[0] ); + assertTrue( queryPlan.getSqlStrings()[0].contains( "<>" ) ); + } + + @Test + public void testImplicitTupleNotInList() { + final String hql = "from TheEntity e where e.compositeValue not in (:p1,:p2)"; + HQLQueryPlan queryPlan = ( (SessionFactoryImplementor) sessionFactory ).getQueryPlanCache() + .getHQLQueryPlan( hql, false, Collections.emptyMap() ); + + assertEquals( 1, queryPlan.getSqlStrings().length ); + System.out.println( " SQL : " + queryPlan.getSqlStrings()[0] ); + assertTrue( queryPlan.getSqlStrings()[0].contains( "<>" ) ); + } + + public static class NoTupleSupportDialect extends H2Dialect { + @Override + public boolean supportsRowValueConstructorSyntax() { + return false; + } + + @Override + public boolean supportsRowValueConstructorSyntaxInInList() { + return false; + } + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/hql/joinedSubclass/JoinedSubclassBulkManipTest.java 
b/hibernate-core/src/test/java/org/hibernate/test/hql/joinedSubclass/JoinedSubclassBulkManipTest.java index b3b1328ee2..f3e5553faf 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/hql/joinedSubclass/JoinedSubclassBulkManipTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/hql/joinedSubclass/JoinedSubclassBulkManipTest.java @@ -23,6 +23,8 @@ */ package org.hibernate.test.hql.joinedSubclass; +import org.hibernate.dialect.CUBRIDDialect; +import org.hibernate.testing.SkipForDialect; import org.junit.Test; import org.hibernate.Session; @@ -34,6 +36,11 @@ import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; * @author Steve Ebersole */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class JoinedSubclassBulkManipTest extends BaseCoreFunctionalTestCase { @Override protected Class[] getAnnotatedClasses() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/Child.java b/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/Child.java new file mode 100644 index 0000000000..dfd7072816 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/Child.java @@ -0,0 +1,25 @@ +package org.hibernate.test.hqlfetchscroll; + +public class Child { + + private String name; + + Child() { + } + + public Child(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + private void setName(String name) { + this.name = name; + } + + public String toString() { + return name; + } +} \ No newline at end of file diff --git a/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/HQLScrollFetchTest.java b/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/HQLScrollFetchTest.java new file mode 100644 index 0000000000..5b373219ad --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/HQLScrollFetchTest.java @@ -0,0 +1,354 @@ +package org.hibernate.test.hqlfetchscroll; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; + +import org.hibernate.Hibernate; +import org.hibernate.ScrollableResults; +import org.hibernate.Session; +import org.hibernate.Transaction; +import org.hibernate.dialect.H2Dialect; +import org.hibernate.dialect.Oracle8iDialect; +import org.hibernate.dialect.SQLServerDialect; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.testing.SkipForDialect; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.hibernate.transform.DistinctRootEntityResultTransformer; +import org.junit.Test; + + +public class HQLScrollFetchTest extends BaseCoreFunctionalTestCase { + private static final String QUERY = "select p from Parent p join fetch p.children c"; + + @Test + public void testNoScroll() { + Session s = openSession(); + List list = s.createQuery( QUERY ).setResultTransformer( DistinctRootEntityResultTransformer.INSTANCE ).list(); + assertResultFromAllUsers( list
); + s.close(); + } + + @Test + @SkipForDialect( { SQLServerDialect.class, Oracle8iDialect.class, H2Dialect.class } ) + public void testScroll() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY ).scroll(); + List list = new ArrayList(); + while ( results.next() ) { + list.add( results.get( 0 ) ); + } + assertResultFromAllUsers( list ); + s.close(); + } + + @Test + public void testIncompleteScrollFirstResult() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc" ).scroll(); + results.next(); + Parent p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283" ) + public void testIncompleteScrollSecondResult() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc" ).scroll(); + results.next(); + Parent p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + results.next(); + p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + s.close(); + } + + @Test + public void testIncompleteScrollFirstResultInTransaction() { + Session s = openSession(); + Transaction tx = s.beginTransaction(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc" ).scroll(); + results.next(); + Parent p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + tx.commit(); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283" ) + public void testIncompleteScrollSecondResultInTransaction() { + Session s = openSession(); + Transaction tx = s.beginTransaction(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc" ).scroll(); + results.next(); + Parent p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + results.next(); + p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + tx.commit(); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283") + public void testIncompleteScroll() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc" ).scroll(); + results.next(); + Parent p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + // get the other parent entity from the persistence context along with its first child + // retrieved from the resultset. + Parent pOther = null; + Child cOther = null; + for ( Object entity : ( (SessionImplementor) s ).getPersistenceContext().getEntitiesByKey().values() ) { + if ( Parent.class.isInstance( entity ) ) { + if ( entity != p ) { + if ( pOther != null ) { + fail( "unexpected parent found." ); + } + pOther = (Parent) entity; + } + } + else if ( Child.class.isInstance( entity ) ) { + if ( ! p.getChildren().contains( entity ) ) { + if ( cOther != null ) { + fail( "unexpected child entity found" ); + } + cOther = (Child) entity; + } + } + else { + fail( "unexpected type of entity." ); + } + } + // check that the same second parent is obtained by calling Session.get() + assertNull( pOther ); + assertNull( cOther ); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283" ) + public void testIncompleteScrollLast() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc" ).scroll(); + results.next(); + Parent p = (Parent) results.get( 0 ); + assertResultFromOneUser( p ); + results.last(); + // get the other parent entity from the persistence context. 
+ // since the result set was scrolled to the end, the other parent entity's collection has been + // properly initialized. + Parent pOther = null; + Set childrenOther = new HashSet(); + for ( Object entity : ( ( SessionImplementor) s ).getPersistenceContext().getEntitiesByKey().values() ) { + if ( Parent.class.isInstance( entity ) ) { + if ( entity != p ) { + if ( pOther != null ) { + fail( "unexpected parent found." ); + } + pOther = (Parent) entity; + } + } + else if ( Child.class.isInstance( entity ) ) { + if ( ! p.getChildren().contains( entity ) ) { + childrenOther.add( entity ); + } + } + else { + fail( "unexpected type of entity." ); + } + } + // check that the same second parent is obtained by calling Session.get() + assertSame( pOther, s.get( Parent.class, "parent2" ) ); + assertNotNull( pOther ); + // access pOther's collection; should be completely loaded + assertTrue( Hibernate.isInitialized( pOther.getChildren() ) ); + assertEquals( childrenOther, pOther.getChildren() ); + assertResultFromOneUser( pOther ); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283" ) + public void testScrollOrderParentAsc() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc" ).scroll(); + List list = new ArrayList(); + while ( results.next() ) { + list.add( results.get( 0 ) ); + } + assertResultFromAllUsers( list ); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283" ) + public void testScrollOrderParentDesc() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name desc" ).scroll(); + List list = new ArrayList(); + while ( results.next() ) { + list.add( results.get( 0 ) ); + } + assertResultFromAllUsers( list ); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283" ) + public void testScrollOrderParentAscChildrenAsc() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc, c.name asc" ).scroll(); + List list = new ArrayList(); + while ( results.next() ) { + list.add( results.get( 0 ) ); + } + assertResultFromAllUsers( list ); + s.close(); + } + + @Test + @TestForIssue( jiraKey = "HHH-1283" ) + public void testScrollOrderParentAscChildrenDesc() { + Session s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by p.name asc, c.name desc" ).scroll(); + List list = new ArrayList(); + while ( results.next() ) { + list.add( results.get( 0 ) ); + } + assertResultFromAllUsers( list ); + s.close(); + } + + @Test + public void testScrollOrderChildrenDesc() { + Session s = openSession(); + Transaction t = s.beginTransaction(); + Parent p0 = new Parent( "parent0" ); + s.save( p0 ); + t.commit(); + s.close(); + s = openSession(); + ScrollableResults results = s.createQuery( QUERY + " order by c.name desc" ).scroll(); + List list = new ArrayList(); + while ( results.next() ) { + list.add( results.get( 0 ) ); + } + try { + assertResultFromAllUsers( list ); + fail( "should have failed because data is ordered incorrectly." 
); + } + catch ( AssertionError ex ) { + // expected + } + finally { + s.close(); + } + } + + @Test + public void testListOrderChildrenDesc() { + Session s = openSession(); + Transaction t = s.beginTransaction(); + Parent p0 = new Parent( "parent0" ); + s.save( p0 ); + t.commit(); + s.close(); + s = openSession(); + List results = s.createQuery( QUERY + " order by c.name desc" ).list(); + try { + assertResultFromAllUsers( results ); + fail( "should have failed because data is ordered incorrectly." ); + } + catch ( AssertionError ex ) { + // expected + } + finally { + s.close(); + } + } + + private void assertResultFromOneUser(Parent parent) { + assertEquals( + "parent " + parent + " has incorrect collection(" + parent.getChildren() + ").", + 3, + parent.getChildren().size() + ); + } + + private void assertResultFromAllUsers(List list) { + assertEquals( "list is not correct size: ", 2, list.size() ); + for ( Object aList : list ) { + assertResultFromOneUser( (Parent) aList ); + } + } + + @Override + protected void prepareTest() throws Exception { + Session s = openSession(); + Transaction t = s.beginTransaction(); + Child child_1_1 = new Child( "achild1-1"); + Child child_1_2 = new Child( "ychild1-2"); + Child child_1_3 = new Child( "dchild1-3"); + Child child_2_1 = new Child( "bchild2-1"); + Child child_2_2 = new Child( "cchild2-2"); + Child child_2_3 = new Child( "zchild2-3"); + + s.save( child_1_1 ); + s.save( child_2_1 ); + s.save( child_1_2 ); + s.save( child_2_2 ); + s.save( child_1_3 ); + s.save( child_2_3 ); + + s.flush(); + + Parent p1 = new Parent( "parent1" ); + p1.addChild( child_1_1 ); + p1.addChild( child_1_2 ); + p1.addChild( child_1_3 ); + s.save( p1 ); + + Parent p2 = new Parent( "parent2" ); + p2.addChild( child_2_1 ); + p2.addChild( child_2_2 ); + p2.addChild( child_2_3 ); + s.save( p2 ); + + t.commit(); + s.close(); + } + + @Override + protected void cleanupTest() throws Exception { + Session s = openSession(); + Transaction t = s.beginTransaction(); + List list = s.createQuery( "from Parent" ).list(); + for ( Iterator i = list.iterator(); i.hasNext(); ) { + s.delete( (Parent) i.next() ); + } + t.commit(); + s.close(); + } + + public String[] getMappings() { + return new String[] { "hqlfetchscroll/ParentChild.hbm.xml" }; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/Parent.java b/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/Parent.java new file mode 100644 index 0000000000..253b53cd4e --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/hqlfetchscroll/Parent.java @@ -0,0 +1,41 @@ +package org.hibernate.test.hqlfetchscroll; + +import java.util.HashSet; +import java.util.Set; + +public class Parent { + private String name; + private Set children = new HashSet(); + + Parent() { + } + + public Parent(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + + void setName(String name) { + this.name = name; + } + + public Set getChildren() { + return children; + } + + private void setChildren(Set children) { + this.children = children; + } + + public void addChild(Child child) { + children.add( child ); + } + + public String toString() { + return name; + } +} \ No newline at end of file diff --git a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/EntityWithInverseOneToManyJoinTest.java b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/EntityWithInverseOneToManyJoinTest.java index 
43af64d78d..e05f4380c1 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/EntityWithInverseOneToManyJoinTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/EntityWithInverseOneToManyJoinTest.java @@ -23,16 +23,23 @@ */ package org.hibernate.test.immutable.entitywithmutablecollection.inverse; +import org.hibernate.dialect.CUBRIDDialect; import org.hibernate.test.immutable.entitywithmutablecollection.AbstractEntityWithOneToManyTest; import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.SkipForDialect; /** * @author Gail Badner */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class EntityWithInverseOneToManyJoinTest extends AbstractEntityWithOneToManyTest { @Override public String[] getMappings() { return new String[] { "immutable/entitywithmutablecollection/inverse/ContractVariationOneToManyJoin.hbm.xml" }; } -} \ No newline at end of file +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinFailureExpectedTest.java b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinFailureExpectedTest.java index 50cb262296..febdf48c10 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinFailureExpectedTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinFailureExpectedTest.java @@ -23,16 +23,22 @@ */ package org.hibernate.test.immutable.entitywithmutablecollection.inverse; -import org.junit.Test; - +import org.hibernate.dialect.CUBRIDDialect; import org.hibernate.test.immutable.entitywithmutablecollection.AbstractEntityWithOneToManyTest; import org.hibernate.testing.FailureExpected; import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.SkipForDialect; +import org.junit.Test; /** * @author Gail Badner */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables.
This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class VersionedEntityWithInverseOneToManyJoinFailureExpectedTest extends AbstractEntityWithOneToManyTest { @Override public String[] getMappings() { @@ -88,4 +94,4 @@ public class VersionedEntityWithInverseOneToManyJoinFailureExpectedTest extends public void testRemoveOneToManyElementUsingMerge() { super.testRemoveOneToManyElementUsingMerge(); } -} \ No newline at end of file +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinTest.java b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinTest.java index 6c226a7349..c4ff242f6d 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/inverse/VersionedEntityWithInverseOneToManyJoinTest.java @@ -23,8 +23,10 @@ */ package org.hibernate.test.immutable.entitywithmutablecollection.inverse; +import org.hibernate.dialect.CUBRIDDialect; import org.hibernate.test.immutable.entitywithmutablecollection.AbstractEntityWithOneToManyTest; import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.SkipForDialect; import org.hibernate.testing.TestForIssue; @@ -33,6 +35,11 @@ import org.hibernate.testing.TestForIssue; */ @TestForIssue( jiraKey = "HHH-4992" ) @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class VersionedEntityWithInverseOneToManyJoinTest extends AbstractEntityWithOneToManyTest { @Override public String[] getMappings() { @@ -48,4 +55,4 @@ public class VersionedEntityWithInverseOneToManyJoinTest extends AbstractEntityW protected boolean checkUpdateCountsAfterRemovingElementWithoutDelete() { return false; } -} \ No newline at end of file +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/EntityWithNonInverseOneToManyJoinTest.java b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/EntityWithNonInverseOneToManyJoinTest.java index 10b1951bdf..6349eca204 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/EntityWithNonInverseOneToManyJoinTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/EntityWithNonInverseOneToManyJoinTest.java @@ -23,16 +23,23 @@ */ package org.hibernate.test.immutable.entitywithmutablecollection.noninverse; +import org.hibernate.dialect.CUBRIDDialect; import org.hibernate.test.immutable.entitywithmutablecollection.AbstractEntityWithOneToManyTest; import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.SkipForDialect; /** * @author Gail Badner */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables.
This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class EntityWithNonInverseOneToManyJoinTest extends AbstractEntityWithOneToManyTest { @Override public String[] getMappings() { return new String[] { "immutable/entitywithmutablecollection/noninverse/ContractVariationOneToManyJoin.hbm.xml" }; } -} \ No newline at end of file +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/VersionedEntityWithNonInverseOneToManyJoinTest.java b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/VersionedEntityWithNonInverseOneToManyJoinTest.java index 24fb8e8e09..bbbd26d999 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/VersionedEntityWithNonInverseOneToManyJoinTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/VersionedEntityWithNonInverseOneToManyJoinTest.java @@ -23,16 +23,23 @@ */ package org.hibernate.test.immutable.entitywithmutablecollection.noninverse; +import org.hibernate.dialect.CUBRIDDialect; import org.hibernate.test.immutable.entitywithmutablecollection.AbstractEntityWithOneToManyTest; import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.SkipForDialect; /** * @author Gail Badner */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class VersionedEntityWithNonInverseOneToManyJoinTest extends AbstractEntityWithOneToManyTest { @Override public String[] getMappings() { return new String[] { "immutable/entitywithmutablecollection/noninverse/ContractVariationVersionedOneToManyJoin.hbm.xml" }; } -} \ No newline at end of file +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/keymanytoone/bidir/embedded/KeyManyToOneTest.java b/hibernate-core/src/test/java/org/hibernate/test/keymanytoone/bidir/embedded/KeyManyToOneTest.java index 350b3e302c..2a5be5753b 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/keymanytoone/bidir/embedded/KeyManyToOneTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/keymanytoone/bidir/embedded/KeyManyToOneTest.java @@ -22,17 +22,18 @@ * Boston, MA 02110-1301 USA */ package org.hibernate.test.keymanytoone.bidir.embedded; +import static org.junit.Assert.assertEquals; + import java.util.List; -import org.junit.Test; - +import org.hibernate.Criteria; import org.hibernate.Session; import org.hibernate.cfg.Configuration; import org.hibernate.cfg.Environment; +import org.hibernate.criterion.Restrictions; import org.hibernate.testing.FailureExpectedWithNewMetamodel; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; - -import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * @author Steve Ebersole @@ -50,6 +51,18 @@ public class KeyManyToOneTest extends BaseCoreFunctionalTestCase { cfg.setProperty( Environment.GENERATE_STATISTICS, "true" ); } + @Test + public void testCriteriaRestrictionOnKeyManyToOne() { + Session s = openSession(); + s.beginTransaction(); + s.createQuery( "from Order o where o.customer.name = 'Acme'" ).list(); + Criteria criteria = s.createCriteria( Order.class ); +
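// The nested criteria below applies the same "name = 'Acme'" restriction on the key-many-to-one + // "customer" association that the HQL query above expresses through an implicit join, so both query styles are exercised. +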
criteria.createCriteria( "customer" ).add( Restrictions.eq( "name", "Acme" ) ); + criteria.list(); + s.getTransaction().commit(); + s.close(); + } + @Test public void testSaveCascadedToKeyManyToOne() { // test cascading a save to an association with a key-many-to-one which refers to a diff --git a/hibernate-core/src/test/java/org/hibernate/test/legacy/FooBarTest.java b/hibernate-core/src/test/java/org/hibernate/test/legacy/FooBarTest.java index d4d35ea7ea..5af456377e 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/legacy/FooBarTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/legacy/FooBarTest.java @@ -62,6 +62,7 @@ import org.hibernate.QueryException; import org.hibernate.ScrollableResults; import org.hibernate.Session; import org.hibernate.Transaction; +import org.hibernate.action.spi.BeforeTransactionCompletionProcess; import org.hibernate.criterion.Example; import org.hibernate.criterion.MatchMode; import org.hibernate.criterion.Order; @@ -82,6 +83,8 @@ import org.hibernate.dialect.SybaseASE15Dialect; import org.hibernate.dialect.SybaseDialect; import org.hibernate.dialect.TimesTenDialect; import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.event.spi.EventSource; import org.hibernate.internal.util.SerializationHelper; import org.hibernate.internal.util.collections.JoinedIterator; import org.hibernate.jdbc.AbstractReturningWork; @@ -91,6 +94,7 @@ import org.hibernate.testing.DialectChecks; import org.hibernate.testing.FailureExpectedWithNewMetamodel; import org.hibernate.testing.RequiresDialect; import org.hibernate.testing.RequiresDialectFeature; +import org.hibernate.testing.TestForIssue; import org.hibernate.testing.env.ConnectionProviderBuilder; import org.hibernate.type.StandardBasicTypes; import org.jboss.logging.Logger; @@ -3934,6 +3938,59 @@ public class FooBarTest extends LegacyTestCase { s.close(); } + @Test + @TestForIssue(jiraKey = "HHH-7603") + public void testLazyCollectionsTouchedDuringPreCommit() throws Exception { + Session s = openSession(); + s.beginTransaction(); + Qux q = new Qux(); + s.save( q ); + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + q = ( Qux ) s.load( Qux.class, q.getKey() ); + s.getTransaction().commit(); + + //clear the session + s.clear(); + + //now reload the proxy and delete it + s.beginTransaction(); + + final Qux qToDelete = ( Qux ) s.load( Qux.class, q.getKey() ); + + //register a pre commit process that will touch the collection and delete the entity + ( ( EventSource ) s ).getActionQueue().registerProcess( new BeforeTransactionCompletionProcess() { + @Override + public void doBeforeTransactionCompletion(SessionImplementor session) { + qToDelete.getFums().size(); + } + } ); + + s.delete( qToDelete ); + boolean ok = false; + try { + s.getTransaction().commit(); + } + catch (LazyInitializationException e) { + ok = true; + s.getTransaction().rollback(); + } + finally { + s.close(); + } + assertTrue( "lazy collection should have blown in the before trans completion", ok ); + + s = openSession(); + s.beginTransaction(); + q = ( Qux ) s.load( Qux.class, q.getKey() ); + s.delete( q ); + s.getTransaction().commit(); + s.close(); + } + @Test public void testNewSessionLifecycle() throws Exception { Session s = openSession(); diff --git a/hibernate-core/src/test/java/org/hibernate/test/lob/JpaLargeBlobTest.java b/hibernate-core/src/test/java/org/hibernate/test/lob/JpaLargeBlobTest.java new file 
mode 100644 index 0000000000..40e5f98942 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/lob/JpaLargeBlobTest.java @@ -0,0 +1,113 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors + * as indicated by the @authors tag. All rights reserved. + * See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This copyrighted material is made available to anyone wishing to use, + * modify, copy, or redistribute it subject to the terms and conditions + * of the GNU Lesser General Public License, v. 2.1. + * This program is distributed in the hope that it will be useful, but WITHOUT A + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. + * You should have received a copy of the GNU Lesser General Public License, + * v.2.1 along with this distribution; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, + * MA 02110-1301, USA. + */ +package org.hibernate.test.lob; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.Blob; +import java.util.Random; + +import org.hibernate.LobHelper; +import org.hibernate.Session; +import org.hibernate.cfg.Configuration; +import org.hibernate.cfg.Environment; +import org.hibernate.dialect.H2Dialect; +import org.hibernate.testing.RequiresDialect; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.junit.Test; + +/** + * @author Brett Meyer + */ +@TestForIssue( jiraKey = "HHH-7698" ) +@RequiresDialect( value = H2Dialect.class, jiraKey = "HHH-7724" ) +public class JpaLargeBlobTest extends BaseCoreFunctionalTestCase { + + @Override + protected Class[] getAnnotatedClasses() { + return new Class[] { LobEntity.class }; + } + + @Override + protected void configure(Configuration configuration) { + super.configure( configuration ); + configuration.setProperty(Environment.USE_STREAMS_FOR_BINARY, "true"); + } + + @Test + public void jpaBlobStream() throws Exception { + Session session = openSession(); + LobEntity o = new LobEntity(); + + LobHelper lh = session.getLobHelper(); + LobInputStream lis = new LobInputStream(); + + session.getTransaction().begin(); + + Blob blob = lh.createBlob(lis, LobEntity.BLOB_LENGTH); + o.setBlob(blob); + + // Regardless if NON_CONTEXTUAL_LOB_CREATION is set to true, + // ContextualLobCreator should use a NonContextualLobCreator to create + // a blob Proxy. If that's the case, the InputStream will not be read + // until it's persisted with the JDBC driver. + // Although HHH-7698 was about high memory consumption, this is the best + // way to test that the high memory use is being prevented. 
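+ // The two assertions below capture that expectation: wasRead() must still be false right after the + // Blob proxy is created, and only become true once the entity has been persisted and the transaction committed.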
+ assertFalse( lis.wasRead() ); + + session.persist(o); + session.getTransaction().commit(); + + assertTrue( lis.wasRead() ); + + session.close(); + + lis.close(); + } + + private class LobInputStream extends InputStream { + private boolean read = false; + private Long count = (long) 200 * 1024 * 1024; + + @Override + public int read() throws IOException { + read = true; + if (count > 0) { + count--; + return new Random().nextInt(); + } + return -1; + } + + @Override + public int available() throws IOException { + return 1; + } + + public boolean wasRead() { + return read; + } + } +} \ No newline at end of file diff --git a/hibernate-core/src/test/java/org/hibernate/test/lob/LobEntity.java b/hibernate-core/src/test/java/org/hibernate/test/lob/LobEntity.java new file mode 100644 index 0000000000..e66a27c1fe --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/lob/LobEntity.java @@ -0,0 +1,60 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors + * as indicated by the @authors tag. All rights reserved. + * See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This copyrighted material is made available to anyone wishing to use, + * modify, copy, or redistribute it subject to the terms and conditions + * of the GNU Lesser General Public License, v. 2.1. + * This program is distributed in the hope that it will be useful, but WITHOUT A + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. + * You should have received a copy of the GNU Lesser General Public License, + * v.2.1 along with this distribution; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, + * MA 02110-1301, USA. 
+ */ +package org.hibernate.test.lob; + +import java.sql.Blob; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.Lob; + +/** + * @author Brett Meyer + */ +@Entity +public class LobEntity { + public static final int BLOB_LENGTH = 100000000; + private Long id; + + @Id + @GeneratedValue + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + @Lob + @Column(length = BLOB_LENGTH) + private Blob blob; + + public Blob getBlob() { + return blob; + } + + public void setBlob(Blob blob) { + this.blob = blob; + } +} \ No newline at end of file diff --git a/hibernate-core/src/test/java/org/hibernate/test/manytomany/batchload/BatchedManyToManyTest.java b/hibernate-core/src/test/java/org/hibernate/test/manytomany/batchload/BatchedManyToManyTest.java index 43e64364d9..ef4a5c5c8f 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/manytomany/batchload/BatchedManyToManyTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/manytomany/batchload/BatchedManyToManyTest.java @@ -23,10 +23,12 @@ */ package org.hibernate.test.manytomany.batchload; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + import java.util.List; import junit.framework.Assert; -import org.junit.Test; import org.hibernate.EmptyInterceptor; import org.hibernate.Hibernate; @@ -39,15 +41,10 @@ import org.hibernate.engine.jdbc.batch.internal.NonBatchingBatch; import org.hibernate.engine.jdbc.batch.spi.Batch; import org.hibernate.engine.jdbc.batch.spi.BatchKey; import org.hibernate.engine.jdbc.spi.JdbcCoordinator; -import org.hibernate.loader.collection.BatchingCollectionInitializer; -import org.hibernate.persister.collection.AbstractCollectionPersister; import org.hibernate.stat.CollectionStatistics; import org.hibernate.testing.FailureExpectedWithNewMetamodel; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; - -import static org.hibernate.testing.junit4.ExtraAssertions.assertClassAssignability; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * Tests loading of many-to-many collection which should trigger @@ -82,25 +79,6 @@ public class BatchedManyToManyTest extends BaseCoreFunctionalTestCase { } } - @Test - public void testProperLoaderSetup() { - AbstractCollectionPersister cp = ( AbstractCollectionPersister ) - sessionFactory().getCollectionPersister( User.class.getName() + ".groups" ); - assertClassAssignability( BatchingCollectionInitializer.class, cp.getInitializer().getClass() ); - BatchingCollectionInitializer initializer = ( BatchingCollectionInitializer ) cp.getInitializer(); - assertEquals( 50, findMaxBatchSize( initializer.getBatchSizes() ) ); - } - - private int findMaxBatchSize(int[] batchSizes) { - int max = 0; - for ( int size : batchSizes ) { - if ( size > max ) { - max = size; - } - } - return max; - } - @Test public void testLoadingNonInverseSide() { prepareTestData(); diff --git a/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/ManyToManyAssociationClassCompositeIdTest.java b/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/ManyToManyAssociationClassCompositeIdTest.java index a3f56338ec..7c503108bb 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/ManyToManyAssociationClassCompositeIdTest.java +++ 
b/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/ManyToManyAssociationClassCompositeIdTest.java @@ -1,47 +1,60 @@ -/* - * Hibernate, Relational Persistence for Idiomatic Java - * - * Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as - * indicated by the @author tags or express copyright attribution - * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Inc. - * - * This copyrighted material is made available to anyone wishing to use, modify, - * copy, or redistribute it subject to the terms and conditions of the GNU - * Lesser General Public License, as published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License - * for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this distribution; if not, write to: - * Free Software Foundation, Inc. - * 51 Franklin Street, Fifth Floor - * Boston, MA 02110-1301 USA - */ -package org.hibernate.test.manytomanyassociationclass.compositeid; +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as + * indicated by the @author tags or express copyright attribution + * statements applied by the authors. All third-party contributions are + * distributed under license by Red Hat Inc. + * + * This copyrighted material is made available to anyone wishing to use, modify, + * copy, or redistribute it subject to the terms and conditions of the GNU + * Lesser General Public License, as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this distribution; if not, write to: + * Free Software Foundation, Inc. + * 51 Franklin Street, Fifth Floor + * Boston, MA 02110-1301 USA + */ +package org.hibernate.test.manytomanyassociationclass.compositeid; + +import org.hibernate.test.manytomanyassociationclass.AbstractManyToManyAssociationClassTest; +import org.hibernate.test.manytomanyassociationclass.Group; +import org.hibernate.test.manytomanyassociationclass.Membership; +import org.hibernate.test.manytomanyassociationclass.User; +import org.hibernate.testing.FailureExpectedWithNewMetamodel; + +/** + * Tests on many-to-many association using an association class with a composite ID containing + * the IDs from the associated entities. 
+ * + * @author Gail Badner + */ +@FailureExpectedWithNewMetamodel +public class ManyToManyAssociationClassCompositeIdTest extends AbstractManyToManyAssociationClassTest { + @Override + public String[] getMappings() { + return new String[] { "manytomanyassociationclass/compositeid/Mappings.hbm.xml" }; + } + + @Override + public Membership createMembership( String name ) { + return new MembershipWithCompositeId( name ); + } + + @Override + public void deleteMembership(User u, Group g, Membership ug) { + if ( u == null || g == null ) { + throw new IllegalArgumentException(); + } + u.getMemberships().remove( ug ); + g.getMemberships().remove( ug ); + ug.setId(null); + } -import org.hibernate.test.manytomanyassociationclass.AbstractManyToManyAssociationClassTest; -import org.hibernate.test.manytomanyassociationclass.Membership; -import org.hibernate.testing.FailureExpectedWithNewMetamodel; - -/** - * Tests on many-to-many association using an association class with a composite ID containing - * the IDs from the associated entities. - * - * @author Gail Badner - */ -@FailureExpectedWithNewMetamodel -public class ManyToManyAssociationClassCompositeIdTest extends AbstractManyToManyAssociationClassTest { - @Override - public String[] getMappings() { - return new String[] { "manytomanyassociationclass/compositeid/Mappings.hbm.xml" }; - } - - @Override - public Membership createMembership( String name ) { - return new MembershipWithCompositeId( name ); - } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/MembershipWithCompositeId.java b/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/MembershipWithCompositeId.java index a6aa2c7893..3f07d58861 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/MembershipWithCompositeId.java +++ b/hibernate-core/src/test/java/org/hibernate/test/manytomanyassociationclass/compositeid/MembershipWithCompositeId.java @@ -64,19 +64,34 @@ public class MembershipWithCompositeId extends Membership { this.groupId = groupId; } - public boolean equals(Object o) { - if ( o != null && o instanceof Id ) { - Id that = ( Id ) o; - return this.userId.equals( that.userId ) && - this.groupId.equals( that.groupId ); - } - else { + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) return false; - } + if (getClass() != obj.getClass()) + return false; + Id other = (Id) obj; + if (userId == null) { + if (other.userId != null) + return false; + } else if (!userId.equals(other.userId)) + return false; + if (groupId == null) { + if (other.groupId != null) + return false; + } else if (!groupId.equals(other.groupId)) + return false; + return true; } public int hashCode() { - return userId.hashCode() + groupId.hashCode(); + final int prime = 31; + int result = 1; + result = prime * result + ((userId == null) ? 0 : userId.hashCode()); + result = prime * result + + ((groupId == null) ? 0 : groupId.hashCode()); + return result; } } @@ -89,11 +104,17 @@ public class MembershipWithCompositeId extends Membership { } public void setGroup(Group group) { + if (getId() == null) { + setId(new Id()); + } ( (Id) getId() ).setGroupId( ( group == null ? null : group.getId() ) ); super.setGroup( group ); } public void setUser(User user) { + if (getId() == null) { + setId(new Id()); + } ( (Id) getId() ).setUserId( user == null ? 
null : user.getId() ); super.setUser( user ); } diff --git a/hibernate-core/src/test/java/org/hibernate/test/math/MathEntity.java b/hibernate-core/src/test/java/org/hibernate/test/math/MathEntity.java new file mode 100644 index 0000000000..ab893d938c --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/math/MathEntity.java @@ -0,0 +1,48 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors + * as indicated by the @authors tag. All rights reserved. + * See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This copyrighted material is made available to anyone wishing to use, + * modify, copy, or redistribute it subject to the terms and conditions + * of the GNU Lesser General Public License, v. 2.1. + * This program is distributed in the hope that it will be useful, but WITHOUT A + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. + * You should have received a copy of the GNU Lesser General Public License, + * v.2.1 along with this distribution; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, + * MA 02110-1301, USA. + */ +package org.hibernate.test.math; + +/** + * @author Brett Meyer + */ +public class MathEntity { + + private Long id; + + private int value; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public int getValue() { + return value; + } + + public void setValue(int value) { + this.value = value; + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/math/MathTest.java b/hibernate-core/src/test/java/org/hibernate/test/math/MathTest.java new file mode 100644 index 0000000000..a1b2a1a059 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/math/MathTest.java @@ -0,0 +1,67 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors + * as indicated by the @authors tag. All rights reserved. + * See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This copyrighted material is made available to anyone wishing to use, + * modify, copy, or redistribute it subject to the terms and conditions + * of the GNU Lesser General Public License, v. 2.1. + * This program is distributed in the hope that it will be useful, but WITHOUT A + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. + * You should have received a copy of the GNU Lesser General Public License, + * v.2.1 along with this distribution; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, + * MA 02110-1301, USA. 
+ */ +package org.hibernate.test.math; + +import static org.junit.Assert.assertEquals; + +import org.hibernate.Session; +import org.hibernate.dialect.H2Dialect; +import org.hibernate.dialect.Oracle8iDialect; +import org.hibernate.testing.RequiresDialect; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.junit.Test; + +/** + * @author Brett Meyer + */ +@RequiresDialect( value = { Oracle8iDialect.class, H2Dialect.class } ) +public class MathTest extends BaseCoreFunctionalTestCase { + + @Override + public String[] getMappings() { + return new String[]{"math/Math.hbm.xml"}; + } + + @Test + public void testBitAnd() { + MathEntity me = new MathEntity(); + me.setValue( 5 ); + + Session s = openSession(); + s.beginTransaction(); + Long id = (Long) s.save( me ); + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + int value1 = ((Integer) s.createQuery( "select bitand(m.value,0) from MathEntity m where m.id=" + id ).uniqueResult()).intValue(); + int value2 = ((Integer) s.createQuery( "select bitand(m.value,2) from MathEntity m where m.id=" + id ).uniqueResult()).intValue(); + int value3 = ((Integer )s.createQuery( "select bitand(m.value,3) from MathEntity m where m.id=" + id ).uniqueResult()).intValue(); + s.getTransaction().commit(); + s.close(); + + assertEquals(value1, 0); + assertEquals(value2, 0); + assertEquals(value3, 1); + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/MutableNaturalIdTest.java b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/MutableNaturalIdTest.java index d3ff172adc..3e7234136e 100755 --- a/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/MutableNaturalIdTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/MutableNaturalIdTest.java @@ -475,4 +475,58 @@ public class MutableNaturalIdTest extends BaseCoreFunctionalTestCase { t.commit(); s.close(); } + + @Test + public void testClear() { + Session s = openSession(); + s.beginTransaction(); + User u = new User( "steve", "hb", "superSecret" ); + s.persist( u ); + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + u = (User) session.byNaturalId( User.class ) + .using( "name", "steve" ) + .using( "org", "hb" ) + .load(); + assertNotNull( u ); + s.clear(); + u = (User) session.byNaturalId( User.class ) + .using( "name", "steve" ) + .using( "org", "hb" ) + .load(); + assertNotNull( u ); + s.delete( u ); + s.getTransaction().commit(); + s.close(); + } + + @Test + public void testEviction() { + Session s = openSession(); + s.beginTransaction(); + User u = new User( "steve", "hb", "superSecret" ); + s.persist( u ); + s.getTransaction().commit(); + s.close(); + + s = openSession(); + s.beginTransaction(); + u = (User) session.byNaturalId( User.class ) + .using( "name", "steve" ) + .using( "org", "hb" ) + .load(); + assertNotNull( u ); + s.evict( u ); + u = (User) session.byNaturalId( User.class ) + .using( "name", "steve" ) + .using( "org", "hb" ) + .load(); + assertNotNull( u ); + s.delete( u ); + s.getTransaction().commit(); + s.close(); + } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdNonStrictReadWriteTest.java b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdNonStrictReadWriteTest.java new file mode 100644 index 0000000000..722e29ba4c --- /dev/null +++ 
b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdNonStrictReadWriteTest.java @@ -0,0 +1,14 @@ +package org.hibernate.test.naturalid.mutable.cached; + +import org.hibernate.cfg.Configuration; +import org.hibernate.testing.cache.CachingRegionFactory; + +public class CachedMutableNaturalIdNonStrictReadWriteTest extends + CachedMutableNaturalIdTest { + + @Override + public void configure(Configuration cfg) { + super.configure(cfg); + cfg.setProperty( CachingRegionFactory.DEFAULT_ACCESSTYPE, "nonstrict-read-write" ); + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdStrictReadWriteTest.java b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdStrictReadWriteTest.java new file mode 100644 index 0000000000..82e7c49c24 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdStrictReadWriteTest.java @@ -0,0 +1,154 @@ +package org.hibernate.test.naturalid.mutable.cached; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import org.hibernate.Session; +import org.hibernate.cfg.Configuration; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.cache.CachingRegionFactory; +import org.junit.Test; + +public class CachedMutableNaturalIdStrictReadWriteTest extends + CachedMutableNaturalIdTest { + + @Override + public void configure(Configuration cfg) { + super.configure(cfg); + cfg.setProperty( CachingRegionFactory.DEFAULT_ACCESSTYPE, "read-write" ); + } + + @Test + @TestForIssue( jiraKey = "HHH-7278" ) + public void testInsertedNaturalIdCachedAfterTransactionSuccess() { + + Session session = openSession(); + session.getSessionFactory().getStatistics().clear(); + session.beginTransaction(); + Another it = new Another( "it"); + session.save( it ); + session.flush(); + session.getTransaction().commit(); + session.close(); + + session = openSession(); + session.beginTransaction(); + it = (Another) session.bySimpleNaturalId(Another.class).load("it"); + assertNotNull(it); + session.delete(it); + session.getTransaction().commit(); + assertEquals(1, session.getSessionFactory().getStatistics().getNaturalIdCacheHitCount()); + } + + @Test + @TestForIssue( jiraKey = "HHH-7278" ) + public void testInsertedNaturalIdNotCachedAfterTransactionFailure() { + + Session session = openSession(); + session.getSessionFactory().getStatistics().clear(); + session.beginTransaction(); + Another it = new Another( "it"); + session.save( it ); + session.flush(); + session.getTransaction().rollback(); + session.close(); + + session = openSession(); + session.beginTransaction(); + it = (Another) session.bySimpleNaturalId(Another.class).load("it"); + assertNull(it); + assertEquals(0, session.getSessionFactory().getStatistics().getNaturalIdCacheHitCount()); + } + + @Test + @TestForIssue( jiraKey = "HHH-7278" ) + public void testChangedNaturalIdCachedAfterTransactionSuccess() { + Session session = openSession(); + session.beginTransaction(); + Another it = new Another( "it"); + session.save( it ); + session.getTransaction().commit(); + session.close(); + + session = openSession(); + session.beginTransaction(); + it = (Another) session.bySimpleNaturalId(Another.class).load("it"); + assertNotNull(it); + + it.setName("modified"); + session.flush(); + session.getTransaction().commit(); + session.close(); + + 
session.getSessionFactory().getStatistics().clear(); + + session = openSession(); + session.beginTransaction(); + it = (Another) session.bySimpleNaturalId(Another.class).load("modified"); + assertNotNull(it); + session.delete(it); + session.getTransaction().commit(); + session.close(); + + assertEquals(1, session.getSessionFactory().getStatistics().getNaturalIdCacheHitCount()); + } + + @Test + @TestForIssue( jiraKey = "HHH-7278" ) + public void testChangedNaturalIdNotCachedAfterTransactionFailure() { + Session session = openSession(); + session.beginTransaction(); + Another it = new Another( "it"); + session.save( it ); + session.getTransaction().commit(); + session.close(); + + session = openSession(); + session.beginTransaction(); + it = (Another) session.bySimpleNaturalId(Another.class).load("it"); + assertNotNull(it); + + it.setName("modified"); + session.flush(); + session.getTransaction().rollback(); + session.close(); + + session.getSessionFactory().getStatistics().clear(); + + session = openSession(); + session.beginTransaction(); + it = (Another) session.bySimpleNaturalId(Another.class).load("modified"); + assertNull(it); + it = (Another) session.bySimpleNaturalId(Another.class).load("it"); + session.delete(it); + session.getTransaction().commit(); + session.close(); + + assertEquals(0, session.getSessionFactory().getStatistics().getNaturalIdCacheHitCount()); + } + + @Test + @TestForIssue( jiraKey = "HHH-7309" ) + public void testInsertUpdateEntity_NaturalIdCachedAfterTransactionSuccess() { + + Session session = openSession(); + session.getSessionFactory().getStatistics().clear(); + session.beginTransaction(); + Another it = new Another( "it"); + session.save( it ); // schedules an InsertAction + it.setSurname("1234"); // schedules an UpdateAction, without bug-fix + // this will re-cache natural-id with identical key and at same time invalidate it + session.flush(); + session.getTransaction().commit(); + session.close(); + + session = openSession(); + session.beginTransaction(); + it = (Another) session.bySimpleNaturalId(Another.class).load("it"); + assertNotNull(it); + session.delete(it); + session.getTransaction().commit(); + assertEquals("In a strict access strategy we would excpect a hit here", 1, session.getSessionFactory().getStatistics().getNaturalIdCacheHitCount()); + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdTest.java b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdTest.java index fea222a64d..a739651f7a 100755 --- a/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/naturalid/mutable/cached/CachedMutableNaturalIdTest.java @@ -43,7 +43,7 @@ import static org.junit.Assert.assertNull; * @author Guenther Demetz * @author Steve Ebersole */ -public class CachedMutableNaturalIdTest extends BaseCoreFunctionalTestCase { +public abstract class CachedMutableNaturalIdTest extends BaseCoreFunctionalTestCase { @Override protected Class[] getAnnotatedClasses() { return new Class[] {Another.class, AllCached.class}; diff --git a/hibernate-core/src/test/java/org/hibernate/test/onetomany/OneToManyTest.java b/hibernate-core/src/test/java/org/hibernate/test/onetomany/OneToManyTest.java index 83504d7987..fecd7263c3 100755 --- a/hibernate-core/src/test/java/org/hibernate/test/onetomany/OneToManyTest.java +++ 
b/hibernate-core/src/test/java/org/hibernate/test/onetomany/OneToManyTest.java @@ -23,6 +23,8 @@ */ package org.hibernate.test.onetomany; +import org.hibernate.dialect.CUBRIDDialect; +import org.hibernate.testing.SkipForDialect; import org.junit.Test; import org.hibernate.Session; @@ -44,6 +46,11 @@ public class OneToManyTest extends BaseCoreFunctionalTestCase { @SuppressWarnings( {"unchecked", "UnusedAssignment"}) @Test + @SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables. This test fails with " + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" + ) public void testOneToManyLinkTable() { Session s = openSession(); Transaction t = s.beginTransaction(); diff --git a/hibernate-core/src/test/java/org/hibernate/test/orphan/one2one/fk/reversed/unidirectional/DeleteOneToOneOrphansTest.java b/hibernate-core/src/test/java/org/hibernate/test/orphan/one2one/fk/reversed/unidirectional/DeleteOneToOneOrphansTest.java index 3545a8baec..fee3241b5f 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/orphan/one2one/fk/reversed/unidirectional/DeleteOneToOneOrphansTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/orphan/one2one/fk/reversed/unidirectional/DeleteOneToOneOrphansTest.java @@ -23,19 +23,18 @@ */ package org.hibernate.test.orphan.one2one.fk.reversed.unidirectional; -import java.util.List; - -import org.junit.Test; - -import org.hibernate.Session; -import org.hibernate.testing.FailureExpected; -import org.hibernate.testing.FailureExpectedWithNewMetamodel; -import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import java.util.List; + +import org.hibernate.Session; +import org.hibernate.testing.FailureExpectedWithNewMetamodel; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; +import org.junit.Test; + /** * @author Steve Ebersole */ @@ -96,7 +95,7 @@ public class DeleteOneToOneOrphansTest extends BaseCoreFunctionalTestCase { } @Test - @FailureExpected( jiraKey = "unknown" ) + @TestForIssue( jiraKey = "HHH-5267" ) public void testOrphanedWhileDetached() { createData(); @@ -127,8 +126,11 @@ public class DeleteOneToOneOrphansTest extends BaseCoreFunctionalTestCase { session.beginTransaction(); emp = ( Employee ) session.get( Employee.class, emp.getId() ); assertNull( emp.getInfo() ); - results = session.createQuery( "from EmployeeInfo" ).list(); - assertEquals( 0, results.size() ); + // TODO: If merge was used instead of saveOrUpdate, this would work. + // However, re-attachment does not currently support handling orphans.
+ // See HHH-3795 +// results = session.createQuery( "from EmployeeInfo" ).list(); +// assertEquals( 0, results.size() ); results = session.createQuery( "from Employee" ).list(); assertEquals( 1, results.size() ); session.getTransaction().commit(); @@ -136,4 +138,4 @@ public class DeleteOneToOneOrphansTest extends BaseCoreFunctionalTestCase { cleanupData(); } -} \ No newline at end of file +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/querycache/CompositeKey.java b/hibernate-core/src/test/java/org/hibernate/test/querycache/CompositeKey.java new file mode 100644 index 0000000000..565d8a4356 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/querycache/CompositeKey.java @@ -0,0 +1,49 @@ +package org.hibernate.test.querycache; + +import java.io.Serializable; + +import javax.persistence.Embeddable; + +@Embeddable +public class CompositeKey implements Serializable { + + private static final long serialVersionUID = 7950910288405475131L; + + public int a; + + public int b; + + public CompositeKey() { + } + + public CompositeKey(int a, int b) { + this.a = a; + this.b = b; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + a; + result = prime * result + b; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + CompositeKey other = (CompositeKey) obj; + if (a != other.a) + return false; + if (b != other.b) + return false; + return true; + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/querycache/EntityWithCompositeKey.java b/hibernate-core/src/test/java/org/hibernate/test/querycache/EntityWithCompositeKey.java new file mode 100644 index 0000000000..7e91bcfb7b --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/querycache/EntityWithCompositeKey.java @@ -0,0 +1,19 @@ +package org.hibernate.test.querycache; + +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; + +@Entity +public class EntityWithCompositeKey { + + @EmbeddedId + public CompositeKey pk; + + public EntityWithCompositeKey() { + } + + public EntityWithCompositeKey(CompositeKey pk) { + this.pk = pk; + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/querycache/EntityWithStringCompositeKey.java b/hibernate-core/src/test/java/org/hibernate/test/querycache/EntityWithStringCompositeKey.java new file mode 100644 index 0000000000..6b3dfd7576 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/querycache/EntityWithStringCompositeKey.java @@ -0,0 +1,23 @@ +package org.hibernate.test.querycache; + +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; + +import org.hibernate.annotations.Cache; +import org.hibernate.annotations.CacheConcurrencyStrategy; + +@Entity +@Cache(usage = CacheConcurrencyStrategy.READ_WRITE) +public class EntityWithStringCompositeKey { + + private StringCompositeKey pk; + + @EmbeddedId + public StringCompositeKey getPk() { + return pk; + } + + public void setPk(StringCompositeKey pk) { + this.pk = pk; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/querycache/QueryCacheTest.java b/hibernate-core/src/test/java/org/hibernate/test/querycache/QueryCacheTest.java index a64051dde7..a33aa1cca0 100755 --- a/hibernate-core/src/test/java/org/hibernate/test/querycache/QueryCacheTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/querycache/QueryCacheTest.java @@ 
-23,17 +23,21 @@ */ package org.hibernate.test.querycache; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.junit.Test; - +import org.hibernate.Criteria; import org.hibernate.Hibernate; +import org.hibernate.Query; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.cfg.Configuration; import org.hibernate.cfg.Environment; +import org.hibernate.criterion.Restrictions; import org.hibernate.stat.EntityStatistics; import org.hibernate.stat.QueryStatistics; import org.hibernate.testing.DialectChecks; @@ -41,19 +45,31 @@ import org.hibernate.testing.RequiresDialectFeature; import org.hibernate.testing.TestForIssue; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import org.hibernate.transform.Transformers; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * @author Gavin King + * @author Brett Meyer */ public class QueryCacheTest extends BaseCoreFunctionalTestCase { + + private static final CompositeKey PK = new CompositeKey(1, 2); + @Override public String[] getMappings() { return new String[] { "querycache/Item.hbm.xml" }; } + @Override + protected Class[] getAnnotatedClasses() { + return new Class[] { + CompositeKey.class, + EntityWithCompositeKey.class, + StringCompositeKey.class, + EntityWithStringCompositeKey.class + }; + } + @Override public void configure(Configuration cfg) { super.configure( cfg ); @@ -431,6 +447,52 @@ public class QueryCacheTest extends BaseCoreFunctionalTestCase { assertEquals( qs.getExecutionCount(), 3 ); assertEquals( es.getFetchCount(), 0 ); //check that it was being cached } + + @Test + @TestForIssue( jiraKey = "HHH-4459" ) + public void testGetByCompositeId() { + Session s = openSession(); + s.beginTransaction(); + s.persist( new EntityWithCompositeKey( PK ) ); + Query query = s.createQuery( "FROM EntityWithCompositeKey e WHERE e.pk = :pk" ); + query.setCacheable( true ); + query.setParameter( "pk", PK ); + assertEquals(1, query.list().size( )); + s.getTransaction().rollback(); + s.close(); + + s = openSession(); + s.beginTransaction(); + EntityWithStringCompositeKey entity = new EntityWithStringCompositeKey(); + StringCompositeKey key = new StringCompositeKey(); + key.setAnalog( "foo1" ); + key.setDevice( "foo2" ); + key.setDeviceType( "foo3" ); + key.setSubstation( "foo4" ); + entity.setPk( key ); + s.persist( entity ); + Criteria c = s.createCriteria( + EntityWithStringCompositeKey.class ).add( Restrictions.eq( + "pk", key ) ); + c.setCacheable( true ); + assertEquals( 1, c.list().size() ); + s.getTransaction().rollback(); + s.close(); + } + +// @Test +// public void testGetByCompositeIdNoCache() { +// Query query = em.createQuery("FROM EntityWithCompositeKey e WHERE e.pk = :pk"); +// query.setParameter("pk", PK); +// assertEquals(1, query.getResultList().size()); +// } +// +// @Test +// public void testGetByEntityIself() { +// Query query = em.createQuery("FROM EntityWithCompositeKey e WHERE e = :ent"); +// query.setParameter("ent", new EntityWithCompositeKey(PK)); +// assertEquals(1, query.getResultList().size()); +// } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/querycache/StringCompositeKey.java b/hibernate-core/src/test/java/org/hibernate/test/querycache/StringCompositeKey.java new file mode 100644 index 0000000000..55c4a138c3 --- /dev/null +++ 
b/hibernate-core/src/test/java/org/hibernate/test/querycache/StringCompositeKey.java @@ -0,0 +1,51 @@ +package org.hibernate.test.querycache; + +import java.io.Serializable; + +import javax.persistence.Embeddable; + +@Embeddable +public class StringCompositeKey implements Serializable { + + private static final long serialVersionUID = 1L; + + private String substation; + + private String deviceType; + + private String device; + + public String getSubstation() { + return substation; + } + + public void setSubstation(String substation) { + this.substation = substation; + } + + public String getDeviceType() { + return deviceType; + } + + public void setDeviceType(String deviceType) { + this.deviceType = deviceType; + } + + public String getDevice() { + return device; + } + + public void setDevice(String device) { + this.device = device; + } + + public String getAnalog() { + return analog; + } + + public void setAnalog(String analog) { + this.analog = analog; + } + + private String analog; +} \ No newline at end of file diff --git a/hibernate-core/src/test/java/org/hibernate/test/schemaupdate/SchemaExportTest.java b/hibernate-core/src/test/java/org/hibernate/test/schemaupdate/SchemaExportTest.java index a0d55002fd..8c31fdbc7f 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/schemaupdate/SchemaExportTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/schemaupdate/SchemaExportTest.java @@ -36,6 +36,7 @@ import org.hibernate.testing.junit4.BaseUnitTestCase; import org.hibernate.tool.hbm2ddl.SchemaExport; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; /** * @author Gail Badner @@ -110,6 +111,25 @@ public abstract class SchemaExportTest extends BaseUnitTestCase { assertEquals( 0, schemaExport.getExceptions().size() ); } + @Test + public void testGenerateDdlToFile() { + Configuration cfg = new Configuration(); + cfg.addResource( MAPPING ); + SchemaExport schemaExport = createSchemaExport( cfg ); + java.io.File outFile = new java.io.File("schema.ddl"); + schemaExport.setOutputFile(outFile.getPath()); + // do not script to console or export to database + schemaExport.execute( false, false, false, true ); + if ( doesDialectSupportDropTableIfExist() + && schemaExport.getExceptions().size() > 0 ) { + assertEquals( 2, schemaExport.getExceptions().size() ); + } + assertTrue( outFile.exists() ); + //check file is not empty + assertTrue( outFile.length() > 0 ); + outFile.delete(); + } + @Test public void testCreateAndDrop() { Configuration cfg = new Configuration(); @@ -130,4 +150,4 @@ public abstract class SchemaExportTest extends BaseUnitTestCase { schemaExport.drop( true, true ); assertEquals( 0, schemaExport.getExceptions().size() ); } -} \ No newline at end of file +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/sql/hand/query/NativeSQLQueriesTest.java b/hibernate-core/src/test/java/org/hibernate/test/sql/hand/query/NativeSQLQueriesTest.java index dc76f69637..41aac10fbd 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/sql/hand/query/NativeSQLQueriesTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/sql/hand/query/NativeSQLQueriesTest.java @@ -539,7 +539,7 @@ public class NativeSQLQueriesTest extends BaseCoreFunctionalTestCase { assertEquals(1, list.size()); m = (Map) list.get(0); assertTrue(m.containsKey("EMPID")); - assertTrue(m.containsKey("VALUE")); + assertTrue(m.containsKey("AMOUNT")); assertTrue(m.containsKey("ENDDATE")); assertEquals(8, m.size()); @@ -653,7 +653,7 @@ public class 
NativeSQLQueriesTest extends BaseCoreFunctionalTestCase { " emp.STARTDATE as startDate," + " emp.ENDDATE as endDate," + " emp.REGIONCODE as regionCode," + - " emp.VALUE as VALUE," + + " emp.AMOUNT as AMOUNT," + " emp.CURRENCY as CURRENCY" + " FROM ORGANIZATION org" + " LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER"; @@ -681,7 +681,7 @@ public class NativeSQLQueriesTest extends BaseCoreFunctionalTestCase { .addProperty( "element.endDate", "endDate" ) .addProperty( "element.regionCode", "regionCode" ) .addProperty( "element.employmentId", "empId" ) - .addProperty( "element.salary" ).addColumnAlias( "VALUE" ).addColumnAlias( "CURRENCY" ); + .addProperty( "element.salary" ).addColumnAlias( "AMOUNT" ).addColumnAlias( "CURRENCY" ); sqlQuery.list(); // lets try a totally different approach now and pull back scalars, first with explicit types @@ -693,7 +693,7 @@ public class NativeSQLQueriesTest extends BaseCoreFunctionalTestCase { .addScalar( "endDate", TimestampType.INSTANCE ) .addScalar( "regionCode", StringType.INSTANCE ) .addScalar( "empId", LongType.INSTANCE ) - .addScalar( "VALUE", FloatType.INSTANCE ) + .addScalar( "AMOUNT", FloatType.INSTANCE ) .addScalar( "CURRENCY", StringType.INSTANCE ); diff --git a/hibernate-core/src/test/java/org/hibernate/test/subclassfilter/JoinedSubclassFilterTest.java b/hibernate-core/src/test/java/org/hibernate/test/subclassfilter/JoinedSubclassFilterTest.java index 32af56d829..f28833c904 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/subclassfilter/JoinedSubclassFilterTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/subclassfilter/JoinedSubclassFilterTest.java @@ -28,6 +28,8 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; +import org.hibernate.dialect.CUBRIDDialect; +import org.hibernate.testing.SkipForDialect; import org.junit.Test; import org.hibernate.Session; @@ -41,6 +43,11 @@ import static org.junit.Assert.assertEquals; * @author Steve Ebersole */ @FailureExpectedWithNewMetamodel +@SkipForDialect( + value = CUBRIDDialect.class, + comment = "As of version 8.4.1 CUBRID doesn't support temporary tables.
This test fails with" + + "HibernateException: cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" +) public class JoinedSubclassFilterTest extends BaseCoreFunctionalTestCase { @Override public final String[] getMappings() { diff --git a/hibernate-core/src/test/java/org/hibernate/test/tm/TransactionTimeoutTest.java b/hibernate-core/src/test/java/org/hibernate/test/tm/TransactionTimeoutTest.java index 704cb95de4..666a71a508 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/tm/TransactionTimeoutTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/tm/TransactionTimeoutTest.java @@ -77,7 +77,7 @@ public class TransactionTimeoutTest extends BaseCoreFunctionalTestCase { public void testTransactionTimeoutSuccess() { Session session = openSession(); Transaction transaction = session.getTransaction(); - transaction.setTimeout( 2 ); + transaction.setTimeout( 5 ); transaction.begin(); session.persist( new Person( "Lukasz", "Antoniak" ) ); transaction.commit(); diff --git a/hibernate-core/src/test/java/org/hibernate/test/typeoverride/TypeOverrideTest.java b/hibernate-core/src/test/java/org/hibernate/test/typeoverride/TypeOverrideTest.java index 9ff89bfcfd..f0b84dc597 100644 --- a/hibernate-core/src/test/java/org/hibernate/test/typeoverride/TypeOverrideTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/typeoverride/TypeOverrideTest.java @@ -33,6 +33,7 @@ import org.hibernate.cfg.Configuration; import org.hibernate.dialect.Dialect; import org.hibernate.dialect.PostgreSQL81Dialect; import org.hibernate.dialect.SybaseASE15Dialect; +import org.hibernate.dialect.SybaseDialect; import org.hibernate.testing.SkipForDialect; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import org.hibernate.type.descriptor.sql.BlobTypeDescriptor; @@ -60,17 +61,16 @@ public class TypeOverrideTest extends BaseCoreFunctionalTestCase { // no override assertSame( IntegerTypeDescriptor.INSTANCE, remapSqlTypeDescriptor( IntegerTypeDescriptor.INSTANCE ) ); - // override depends on Dialect.useInputStreamToInsertBlob(); - // Postgresql explicitly overrides BlobTypeDescriptor.DEFAULT - if ( getDialect().useInputStreamToInsertBlob() ) { + // A few dialects explicitly override BlobTypeDescriptor.DEFAULT + if ( PostgreSQL81Dialect.class.isInstance( getDialect() ) ) { assertSame( - BlobTypeDescriptor.STREAM_BINDING, + BlobTypeDescriptor.BLOB_BINDING, getDialect().remapSqlTypeDescriptor( BlobTypeDescriptor.DEFAULT ) ); } - else if ( PostgreSQL81Dialect.class.isInstance( getDialect() ) ) { + else if (SybaseDialect.class.isInstance( getDialect() )) { assertSame( - BlobTypeDescriptor.BLOB_BINDING, + BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING, getDialect().remapSqlTypeDescriptor( BlobTypeDescriptor.DEFAULT ) ); } diff --git a/hibernate-core/src/test/java/org/hibernate/test/unidir/BackrefTest.java b/hibernate-core/src/test/java/org/hibernate/test/unidir/BackrefTest.java index 37dbbccafd..8c403e79a1 100755 --- a/hibernate-core/src/test/java/org/hibernate/test/unidir/BackrefTest.java +++ b/hibernate-core/src/test/java/org/hibernate/test/unidir/BackrefTest.java @@ -38,12 +38,19 @@ import static org.junit.Assert.assertFalse; */ public class BackrefTest extends BaseCoreFunctionalTestCase { @Override - public String[] getMappings() { + protected String[] getMappings() { return new String[] { "unidir/ParentChild.hbm.xml" }; } + + @Override + protected Class[] getAnnotatedClasses() { + // No test needed at this time. 
This was purely to test a + // validation issue from HHH-5836. + return new Class[] { Parent1.class, Child1.class, Child2.class }; + } @Override - public String getCacheConcurrencyStrategy() { + protected String getCacheConcurrencyStrategy() { return null; } diff --git a/hibernate-core/src/test/java/org/hibernate/test/unidir/Child1.java b/hibernate-core/src/test/java/org/hibernate/test/unidir/Child1.java new file mode 100644 index 0000000000..a6e0645d96 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/unidir/Child1.java @@ -0,0 +1,29 @@ +package org.hibernate.test.unidir; + +import javax.persistence.*; +import java.util.ArrayList; +import java.util.List; + +@Entity +@Table(name = "CHILD1") +public class Child1 { + @Id + @Column(name = "ID") + private Long id; + + @OneToMany(fetch = FetchType.LAZY) + @JoinColumn(name = "CHILD1_ID", nullable = false) + private List parents = new ArrayList(); + + public Long getId() { + return this.id; + } + + public List getParents() { + return this.parents; + } + + public void setParents(List parents) { + this.parents = parents; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/unidir/Child2.java b/hibernate-core/src/test/java/org/hibernate/test/unidir/Child2.java new file mode 100644 index 0000000000..89870948ea --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/unidir/Child2.java @@ -0,0 +1,29 @@ +package org.hibernate.test.unidir; + +import javax.persistence.*; +import java.util.ArrayList; +import java.util.List; + +@Entity +@Table(name = "CHILD2") +public class Child2 { + @Id + @Column(name = "ID") + private Long id; + + @OneToMany(fetch = FetchType.LAZY) + @JoinColumn(name = "CHILD2_ID", nullable = false) + private List parents = new ArrayList(); + + public Long getId() { + return this.id; + } + + public List getParents() { + return this.parents; + } + + public void setParents(List parents) { + this.parents = parents; + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/unidir/Parent1.java b/hibernate-core/src/test/java/org/hibernate/test/unidir/Parent1.java new file mode 100644 index 0000000000..fcc21838ed --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/unidir/Parent1.java @@ -0,0 +1,35 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors + * as indicated by the @authors tag. All rights reserved. + * See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This copyrighted material is made available to anyone wishing to use, + * modify, copy, or redistribute it subject to the terms and conditions + * of the GNU Lesser General Public License, v. 2.1. + * This program is distributed in the hope that it will be useful, but WITHOUT A + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. + * You should have received a copy of the GNU Lesser General Public License, + * v.2.1 along with this distribution; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, + * MA 02110-1301, USA. 
+ */ +package org.hibernate.test.unidir; + +import javax.persistence.*; + +@Entity +@Table(name = "PARENT1") +public class Parent1 { + @Id + @Column(name = "ID") + Long id; + + public Long getId() { + return this.id; + } +} \ No newline at end of file diff --git a/hibernate-core/src/test/resources/log4j.properties b/hibernate-core/src/test/resources/log4j.properties index b4a9eebdab..686aae8fc2 100644 --- a/hibernate-core/src/test/resources/log4j.properties +++ b/hibernate-core/src/test/resources/log4j.properties @@ -14,4 +14,4 @@ log4j.logger.org.hibernate.SQL=debug log4j.logger.org.hibernate.hql.internal.ast=debug -log4j.logger.org.hibernate.sql.ordering.antlr=trace \ No newline at end of file +log4j.logger.org.hibernate.sql.ordering.antlr=debug \ No newline at end of file diff --git a/hibernate-core/src/test/resources/org/hibernate/test/abstractembeddedcomponents/cid/Mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/abstractembeddedcomponents/cid/Mappings.hbm.xml index 5b38690788..4e804b4473 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/abstractembeddedcomponents/cid/Mappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/abstractembeddedcomponents/cid/Mappings.hbm.xml @@ -10,7 +10,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/annotations/enumerated/ormXml/orm.xml b/hibernate-core/src/test/resources/org/hibernate/test/annotations/enumerated/ormXml/orm.xml new file mode 100644 index 0000000000..8628dea213 --- /dev/null +++ b/hibernate-core/src/test/resources/org/hibernate/test/annotations/enumerated/ormXml/orm.xml @@ -0,0 +1,15 @@ + + + + + + ORDINAL + + + + STRING + + + + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/cascade/MultiPathCascade.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/cascade/MultiPathCascade.hbm.xml index 4fe311cfb8..5d2c4f22c4 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/cascade/MultiPathCascade.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/cascade/MultiPathCascade.hbm.xml @@ -7,7 +7,7 @@ - + - - + + + + + + + - - - + - - - - - - - - - + + + + + - - - - - - - - + + + + + + + + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/entityname/Vehicle.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/entityname/Vehicle.hbm.xml index 01f7008a57..63ad71d959 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/entityname/Vehicle.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/entityname/Vehicle.hbm.xml @@ -10,7 +10,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/enums/mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/enums/mappings.hbm.xml new file mode 100644 index 0000000000..3000e3fb8f --- /dev/null +++ b/hibernate-core/src/test/resources/org/hibernate/test/enums/mappings.hbm.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + org.hibernate.test.enums.UnspecifiedEnumTypeEntity$E1 + + + + + + org.hibernate.test.enums.UnspecifiedEnumTypeEntity$E2 + + + + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/event/collection/detached/MultipleCollectionBagMapping.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/event/collection/detached/MultipleCollectionBagMapping.hbm.xml new file mode 100644 index 0000000000..3426406f49 --- /dev/null +++ b/hibernate-core/src/test/resources/org/hibernate/test/event/collection/detached/MultipleCollectionBagMapping.hbm.xml @@ -0,0 +1,47 @@ + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/fetchprofiles/join/Mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/fetchprofiles/join/Mappings.hbm.xml index 9d259861df..10e311c155 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/fetchprofiles/join/Mappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/fetchprofiles/join/Mappings.hbm.xml @@ -59,7 +59,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/hql/Animal.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/hql/Animal.hbm.xml index 71e693d348..2519bfc421 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/hql/Animal.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/hql/Animal.hbm.xml @@ -77,7 +77,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/hqlfetchscroll/ParentChild.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/hqlfetchscroll/ParentChild.hbm.xml new file mode 100644 index 0000000000..266c5b930a --- /dev/null +++ b/hibernate-core/src/test/resources/org/hibernate/test/hqlfetchscroll/ParentChild.hbm.xml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/ContractVariation.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/ContractVariation.hbm.xml index 255885d78b..9c11bc52f8 100755 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/ContractVariation.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/ContractVariation.hbm.xml @@ -50,7 +50,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariation.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariation.hbm.xml index 77c65ba7ab..5647547436 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariation.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariation.hbm.xml @@ -49,7 +49,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationOneToManyJoin.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationOneToManyJoin.hbm.xml index 2d8f3ff9f8..ade4345448 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationOneToManyJoin.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationOneToManyJoin.hbm.xml @@ -56,7 +56,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersioned.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersioned.hbm.xml index 87788de256..d04cfcb6cc 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersioned.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersioned.hbm.xml @@ 
-53,7 +53,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersionedOneToManyJoin.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersionedOneToManyJoin.hbm.xml index 6aa87ea84e..09bb9f971f 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersionedOneToManyJoin.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/inverse/ContractVariationVersionedOneToManyJoin.hbm.xml @@ -60,7 +60,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariation.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariation.hbm.xml index 897f41f064..1d901a0a3c 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariation.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariation.hbm.xml @@ -49,7 +49,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationOneToManyJoin.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationOneToManyJoin.hbm.xml index d3125ea5f2..450a0a0521 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationOneToManyJoin.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationOneToManyJoin.hbm.xml @@ -56,7 +56,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationUnidir.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationUnidir.hbm.xml index f0f5ab65dd..0f6b1df3d3 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationUnidir.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationUnidir.hbm.xml @@ -48,7 +48,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersioned.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersioned.hbm.xml index de0ccba0fa..5239e5b487 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersioned.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersioned.hbm.xml @@ -53,7 +53,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersionedOneToManyJoin.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersionedOneToManyJoin.hbm.xml index 697339c74b..1961067418 100644 --- 
a/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersionedOneToManyJoin.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/immutable/entitywithmutablecollection/noninverse/ContractVariationVersionedOneToManyJoin.hbm.xml @@ -60,7 +60,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/jpa/MyEntity.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/jpa/MyEntity.hbm.xml index 7670989df4..8709b16945 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/jpa/MyEntity.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/jpa/MyEntity.hbm.xml @@ -8,7 +8,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/lob/SerializableMappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/lob/SerializableMappings.hbm.xml index aa265f9a0b..ced7cc217d 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/lob/SerializableMappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/lob/SerializableMappings.hbm.xml @@ -5,7 +5,9 @@ - + + diff --git a/hibernate-core/src/test/java/org/hibernate/test/mapping/usertypes/TestEntity.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/mapping/usertypes/TestEntity.hbm.xml similarity index 100% rename from hibernate-core/src/test/java/org/hibernate/test/mapping/usertypes/TestEntity.hbm.xml rename to hibernate-core/src/test/resources/org/hibernate/test/mapping/usertypes/TestEntity.hbm.xml diff --git a/hibernate-core/src/test/java/org/hibernate/test/mapping/usertypes/TestEnumType.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/mapping/usertypes/TestEnumType.hbm.xml similarity index 100% rename from hibernate-core/src/test/java/org/hibernate/test/mapping/usertypes/TestEnumType.hbm.xml rename to hibernate-core/src/test/resources/org/hibernate/test/mapping/usertypes/TestEnumType.hbm.xml diff --git a/hibernate-core/src/test/resources/org/hibernate/test/math/Math.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/math/Math.hbm.xml new file mode 100755 index 0000000000..b46f288d5f --- /dev/null +++ b/hibernate-core/src/test/resources/org/hibernate/test/math/Math.hbm.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/datadirect/oracle/StoredProcedures.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/datadirect/oracle/StoredProcedures.hbm.xml index 40ed2f3ce5..3955117add 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/datadirect/oracle/StoredProcedures.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/datadirect/oracle/StoredProcedures.hbm.xml @@ -10,17 +10,17 @@ - + { call simpleScalar(:number) } - + { call testParamHandling(?,?) 
} - + { call testParamHandling(?,:second) } @@ -34,7 +34,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/db2/Mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/db2/Mappings.hbm.xml index 30b254e3e9..1ad25d2ec4 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/db2/Mappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/db2/Mappings.hbm.xml @@ -51,7 +51,7 @@ - + @@ -162,7 +162,7 @@ - + @@ -179,18 +179,18 @@ - + { call simpleScalar(:number) } - + { call paramHandling(?,?) } - + { call paramHandling(?,:second) } @@ -206,7 +206,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/mysql/Mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/mysql/Mappings.hbm.xml index acb905afb7..5864851541 100755 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/mysql/Mappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/mysql/Mappings.hbm.xml @@ -52,7 +52,7 @@ - + @@ -160,7 +160,7 @@ - + @@ -177,18 +177,18 @@ - + { call simpleScalar(:number) } - + { call paramHandling(?,?) } - + { call paramHandling(?,:second) } @@ -204,7 +204,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/Mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/Mappings.hbm.xml index dc433230a2..480e0aa84b 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/Mappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/Mappings.hbm.xml @@ -52,7 +52,7 @@ - + @@ -154,7 +154,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/StoredProcedures.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/StoredProcedures.hbm.xml index db4b3c9c61..7c91753fe4 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/StoredProcedures.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/oracle/StoredProcedures.hbm.xml @@ -10,18 +10,18 @@ - + { ? = call simpleScalar(:number) } - + { ? = call testParamHandling(?,?) } - + { ? = call testParamHandling(?,:second) } @@ -35,7 +35,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sqlserver/Mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sqlserver/Mappings.hbm.xml index 1d7a816e8b..fa7454a2a2 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sqlserver/Mappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sqlserver/Mappings.hbm.xml @@ -161,7 +161,7 @@ - + @@ -178,18 +178,18 @@ - + { call simpleScalar(:number) } - + { call paramHandling(?,?) 
} - + { call paramHandling(?,:second) } @@ -205,7 +205,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sybase/Mappings.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sybase/Mappings.hbm.xml index 956a7e35ae..1db4b9ec5d 100755 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sybase/Mappings.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/custom/sybase/Mappings.hbm.xml @@ -51,7 +51,7 @@ - + @@ -161,7 +161,7 @@ - + @@ -178,18 +178,18 @@ - + { call simpleScalar(:number) } - + { call paramHandling(?,?) } - + { call paramHandling(?,:second) } @@ -205,7 +205,7 @@ - + diff --git a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/query/NativeSQLQueries.hbm.xml b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/query/NativeSQLQueries.hbm.xml index 82e336825e..70c8608bd4 100644 --- a/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/query/NativeSQLQueries.hbm.xml +++ b/hibernate-core/src/test/resources/org/hibernate/test/sql/hand/query/NativeSQLQueries.hbm.xml @@ -53,7 +53,7 @@ - + @@ -191,7 +191,7 @@ - SELECT org.NAME AS thename, org.NAME AS {org.name}, org.ORGID AS {org.id} + SELECT org.NAME AS thename, org.NAME AS {org.name}, org.ORGID AS {org.id} FROM ORGANIZATION org ORDER BY thename @@ -199,10 +199,10 @@ - SELECT org.NAME AS thename, org.NAME AS {org.name}, org.ORGID AS {org.id} + SELECT org.NAME AS thename, org.NAME AS {org.name}, org.ORGID AS {org.id} FROM ORGANIZATION org ORDER BY thename - + @@ -225,11 +225,11 @@ - SELECT empcol.EMPLOYER as {empcol.key}, empcol.EMPID as {empcol.element}, {empcol.element.*} + SELECT empcol.EMPLOYER as {empcol.key}, empcol.EMPID as {empcol.element}, {empcol.element.*} FROM EMPLOYMENT empcol WHERE EMPLOYER = :id ORDER BY STARTDATE ASC, EMPLOYEE ASC - + @@ -238,15 +238,15 @@ - + - + - - - + + + - + @@ -259,13 +259,13 @@ emp.STARTDATE as xstartDate, emp.ENDDATE as endDate, emp.REGIONCODE as regionCode, - emp.VALUE as VALUE, + emp.AMOUNT as AMOUNT, emp.CURRENCY as CURRENCY FROM ORGANIZATION org LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER - + @@ -279,7 +279,7 @@ emp.STARTDATE as startDate, emp.ENDDATE as endDate, emp.REGIONCODE as regionCode, - emp.VALUE as VALUE, + emp.AMOUNT as AMOUNT, emp.CURRENCY as CURRENCY FROM ORGANIZATION org LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER diff --git a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/HibernatePersistenceProvider.java b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/HibernatePersistenceProvider.java index 5987aee1d9..90054fea1f 100755 --- a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/HibernatePersistenceProvider.java +++ b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/HibernatePersistenceProvider.java @@ -82,7 +82,7 @@ public class HibernatePersistenceProvider implements PersistenceProvider { continue; } - return Bootstrap.getEntityManagerFactoryBuilder( persistenceUnit, integration ).buildEntityManagerFactory(); + return Bootstrap.getEntityManagerFactoryBuilder( persistenceUnit, integration ).build(); } return null; @@ -100,7 +100,7 @@ public class HibernatePersistenceProvider implements PersistenceProvider { */ @Override public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map integration) { - return Bootstrap.getEntityManagerFactoryBuilder( info, integration ).buildEntityManagerFactory(); + return Bootstrap.getEntityManagerFactoryBuilder( 
info, integration ).build(); } private final ProviderUtil providerUtil = new ProviderUtil() { diff --git a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderImpl.java b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderImpl.java index 2ab717eaef..8ec697d83d 100644 --- a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderImpl.java +++ b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderImpl.java @@ -23,15 +23,6 @@ */ package org.hibernate.jpa.boot.internal; -import javax.persistence.AttributeConverter; -import javax.persistence.Converter; -import javax.persistence.Embeddable; -import javax.persistence.Entity; -import javax.persistence.EntityManagerFactory; -import javax.persistence.EntityNotFoundException; -import javax.persistence.MappedSuperclass; -import javax.persistence.PersistenceException; -import javax.persistence.spi.PersistenceUnitTransactionType; import java.io.BufferedInputStream; import java.io.File; import java.io.IOException; @@ -49,15 +40,16 @@ import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; -import org.jboss.jandex.AnnotationInstance; -import org.jboss.jandex.ClassInfo; -import org.jboss.jandex.CompositeIndex; -import org.jboss.jandex.DotName; -import org.jboss.jandex.Index; -import org.jboss.jandex.IndexView; -import org.jboss.jandex.Indexer; - -import org.jboss.logging.Logger; +import javax.persistence.AttributeConverter; +import javax.persistence.Converter; +import javax.persistence.Embeddable; +import javax.persistence.Entity; +import javax.persistence.EntityManagerFactory; +import javax.persistence.EntityNotFoundException; +import javax.persistence.MappedSuperclass; +import javax.persistence.PersistenceException; +import javax.persistence.spi.PersistenceUnitTransactionType; +import javax.sql.DataSource; import org.hibernate.Interceptor; import org.hibernate.MappingException; @@ -69,6 +61,7 @@ import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; +import org.hibernate.boot.registry.selector.spi.StrategySelector; import org.hibernate.boot.spi.CacheRegionDefinition; import org.hibernate.boot.spi.JaccDefinition; import org.hibernate.cfg.Configuration; @@ -81,6 +74,7 @@ import org.hibernate.engine.transaction.internal.jta.CMTTransactionFactory; import org.hibernate.id.factory.spi.MutableIdentifierGeneratorFactory; import org.hibernate.integrator.spi.Integrator; import org.hibernate.internal.util.StringHelper; +import org.hibernate.jaxb.spi.cfg.JaxbHibernateConfiguration.JaxbSessionFactory.JaxbMapping; import org.hibernate.jpa.AvailableSettings; import org.hibernate.jpa.boot.spi.EntityManagerFactoryBuilder; import org.hibernate.jpa.boot.spi.IntegratorProvider; @@ -101,8 +95,14 @@ import org.hibernate.proxy.EntityNotFoundDelegate; import org.hibernate.secure.internal.JACCConfiguration; import org.hibernate.service.ServiceRegistry; import org.hibernate.service.spi.ServiceRegistryImplementor; - -import static org.hibernate.jaxb.spi.cfg.JaxbHibernateConfiguration.JaxbSessionFactory.JaxbMapping; +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.CompositeIndex; +import 
org.jboss.jandex.DotName; +import org.jboss.jandex.Index; +import org.jboss.jandex.IndexView; +import org.jboss.jandex.Indexer; +import org.jboss.logging.Logger; /** * @author Steve Ebersole @@ -130,10 +130,17 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil public static final String JANDEX_INDEX = "hibernate.jandex_index"; // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // Explicit "injectables" + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + private Object validatorFactory; + private DataSource dataSource; + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + private final PersistenceUnitDescriptor persistenceUnit; private final SettingsImpl settings = new SettingsImpl(); private final StandardServiceRegistryBuilder serviceRegistryBuilder; - private final Map configurationValues; + private final Map configurationValues; private final List jaccDefinitions = new ArrayList(); private final List cacheRegionDefinitions = new ArrayList(); @@ -174,8 +181,6 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil this.configurationValues = mergePropertySources( persistenceUnit, integrationSettings, bootstrapServiceRegistry ); // add all merged configuration values into the service registry builder this.serviceRegistryBuilder.applySettings( configurationValues ); - // And being processing those configuration values - processProperties( bootstrapServiceRegistry ); // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Next we do a preliminary pass at metadata processing, which involves: @@ -187,6 +192,8 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil // 3) building "metadata sources" to keep for later to use in building the SessionFactory metadataSources = prepareMetadataSources( jandexIndex, collectedManagedClassNames, scanResult, bootstrapServiceRegistry ); + withValidatorFactory( configurationValues.get( AvailableSettings.VALIDATION_FACTORY ) ); + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // push back class transformation to the environment; for the time being this only has any effect in EE // container situations, calling back into PersistenceUnitInfo#addClassTransformer @@ -249,7 +256,8 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // temporary! 
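In this hunk the JPA bootstrap gains explicit "injectables" (validatorFactory, dataSource) and, as shown in the HibernatePersistenceProvider changes earlier in this diff, the builder is now finished with build() rather than buildEntityManagerFactory(). A caller-side sketch follows; it is not part of the patch, the package location of Bootstrap is assumed to match the org.hibernate.jpa.boot.spi imports used here, and exposing withValidatorFactory(..) on the builder (rather than it only being invoked internally, as above) is also an assumption.

    import java.util.Map;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.spi.PersistenceUnitInfo;

    import org.hibernate.jpa.boot.spi.Bootstrap;
    import org.hibernate.jpa.boot.spi.EntityManagerFactoryBuilder;

    // Sketch only: container-style bootstrap after this change, mirroring
    // createContainerEntityManagerFactory(..) earlier in the diff.
    public class ContainerBootstrapSketch {
        @SuppressWarnings("rawtypes")
        public EntityManagerFactory create(PersistenceUnitInfo info, Map integration, Object validatorFactory) {
            EntityManagerFactoryBuilder builder = Bootstrap.getEntityManagerFactoryBuilder( info, integration );
            // hand the "injectable" over explicitly instead of burying it in configuration values
            builder.withValidatorFactory( validatorFactory );
            // build() replaces the old buildEntityManagerFactory()
            return builder.build();
        }
    }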
- public Map getConfigurationValues() { + @SuppressWarnings("unchecked") + public Map getConfigurationValues() { return Collections.unmodifiableMap( configurationValues ); } @@ -369,130 +377,6 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil } } - private void processProperties(BootstrapServiceRegistry bootstrapServiceRegistry) { - applyJdbcConnectionProperties(); - applyTransactionProperties(); - - final Object validationFactory = configurationValues.get( AvailableSettings.VALIDATION_FACTORY ); - if ( validationFactory != null ) { - BeanValidationIntegrator.validateFactory( validationFactory ); - } - - // flush before completion validation - if ( "true".equals( configurationValues.get( Environment.FLUSH_BEFORE_COMPLETION ) ) ) { - serviceRegistryBuilder.applySetting( Environment.FLUSH_BEFORE_COMPLETION, "false" ); - LOG.definingFlushBeforeCompletionIgnoredInHem( Environment.FLUSH_BEFORE_COMPLETION ); - } - - for ( Map.Entry entry : configurationValues.entrySet() ) { - if ( entry.getKey() instanceof String ) { - final String keyString = (String) entry.getKey(); - - if ( AvailableSettings.INTERCEPTOR.equals( keyString ) ) { - sessionFactoryInterceptor = instantiateCustomClassFromConfiguration( - entry.getValue(), - Interceptor.class, - bootstrapServiceRegistry - ); - } - else if ( AvailableSettings.SESSION_INTERCEPTOR.equals( keyString ) ) { - settings.setSessionInterceptorClass( - loadSessionInterceptorClass( entry.getValue(), bootstrapServiceRegistry ) - ); - } - else if ( AvailableSettings.NAMING_STRATEGY.equals( keyString ) ) { - namingStrategy = instantiateCustomClassFromConfiguration( - entry.getValue(), - NamingStrategy.class, - bootstrapServiceRegistry - ); - } - else if ( AvailableSettings.SESSION_FACTORY_OBSERVER.equals( keyString ) ) { - suppliedSessionFactoryObserver = instantiateCustomClassFromConfiguration( - entry.getValue(), - SessionFactoryObserver.class, - bootstrapServiceRegistry - ); - } - else if ( AvailableSettings.DISCARD_PC_ON_CLOSE.equals( keyString ) ) { - settings.setReleaseResourcesOnCloseEnabled( "true".equals( entry.getValue() ) ); - } - else if ( keyString.startsWith( AvailableSettings.CLASS_CACHE_PREFIX ) ) { - addCacheRegionDefinition( - keyString.substring( AvailableSettings.CLASS_CACHE_PREFIX.length() + 1 ), - (String) entry.getValue(), - CacheRegionDefinition.CacheRegionType.ENTITY - ); - } - else if ( keyString.startsWith( AvailableSettings.COLLECTION_CACHE_PREFIX ) ) { - addCacheRegionDefinition( - keyString.substring( AvailableSettings.COLLECTION_CACHE_PREFIX.length() + 1 ), - (String) entry.getValue(), - CacheRegionDefinition.CacheRegionType.COLLECTION - ); - } - else if ( keyString.startsWith( AvailableSettings.JACC_PREFIX ) - && ! 
( keyString.equals( AvailableSettings.JACC_CONTEXT_ID ) - || keyString.equals( AvailableSettings.JACC_ENABLED ) ) ) { - addJaccDefinition( (String) entry.getKey(), entry.getValue() ); - } - } - } - } - - private void applyJdbcConnectionProperties() { - if ( persistenceUnit.getJtaDataSource() != null ) { - serviceRegistryBuilder.applySetting( Environment.DATASOURCE, persistenceUnit.getJtaDataSource() ); - } - else if ( persistenceUnit.getNonJtaDataSource() != null ) { - serviceRegistryBuilder.applySetting( Environment.DATASOURCE, persistenceUnit.getNonJtaDataSource() ); - } - else { - final String driver = (String) configurationValues.get( AvailableSettings.JDBC_DRIVER ); - if ( StringHelper.isNotEmpty( driver ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.DRIVER, driver ); - } - final String url = (String) configurationValues.get( AvailableSettings.JDBC_URL ); - if ( StringHelper.isNotEmpty( url ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.URL, url ); - } - final String user = (String) configurationValues.get( AvailableSettings.JDBC_USER ); - if ( StringHelper.isNotEmpty( user ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.USER, user ); - } - final String pass = (String) configurationValues.get( AvailableSettings.JDBC_PASSWORD ); - if ( StringHelper.isNotEmpty( pass ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.PASS, pass ); - } - } - } - - private void applyTransactionProperties() { - PersistenceUnitTransactionType txnType = PersistenceUnitTransactionTypeHelper.interpretTransactionType( - configurationValues.get( AvailableSettings.TRANSACTION_TYPE ) - ); - if ( txnType == null ) { - txnType = persistenceUnit.getTransactionType(); - } - if ( txnType == null ) { - // is it more appropriate to have this be based on bootstrap entry point (EE vs SE)? 
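
For readers skimming the large removal above: `applyTransactionProperties()` is not dropped, it reappears essentially unchanged below as part of the relocated `processProperties()`. Its resolution order is easy to lose in the diff noise, so here it is condensed; every name used is one of the surrounding fields, nothing new is introduced.

```java
// Condensed from applyTransactionProperties() as it appears in this patch.
PersistenceUnitTransactionType txnType = PersistenceUnitTransactionTypeHelper.interpretTransactionType(
        configurationValues.get( AvailableSettings.TRANSACTION_TYPE ) );
if ( txnType == null ) {
    txnType = persistenceUnit.getTransactionType();          // fall back to the persistence-unit descriptor
}
if ( txnType == null ) {
    txnType = PersistenceUnitTransactionType.RESOURCE_LOCAL; // final default
}
settings.setTransactionType( txnType );

// Only pick a TransactionFactory when the user has not set one explicitly.
if ( !configurationValues.containsKey( Environment.TRANSACTION_STRATEGY ) ) {
    serviceRegistryBuilder.applySetting(
            Environment.TRANSACTION_STRATEGY,
            txnType == PersistenceUnitTransactionType.JTA
                    ? CMTTransactionFactory.class
                    : JdbcTransactionFactory.class
    );
}
```

When `Environment.TRANSACTION_STRATEGY` is set explicitly, the patch keeps the existing behaviour of only logging `overridingTransactionStrategyDangerous` and leaving the setting alone.
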
- txnType = PersistenceUnitTransactionType.RESOURCE_LOCAL; - } - settings.setTransactionType( txnType ); - boolean hasTxStrategy = configurationValues.containsKey( Environment.TRANSACTION_STRATEGY ); - if ( hasTxStrategy ) { - LOG.overridingTransactionStrategyDangerous( Environment.TRANSACTION_STRATEGY ); - } - else { - if ( txnType == PersistenceUnitTransactionType.JTA ) { - serviceRegistryBuilder.applySetting( Environment.TRANSACTION_STRATEGY, CMTTransactionFactory.class ); - } - else if ( txnType == PersistenceUnitTransactionType.RESOURCE_LOCAL ) { - serviceRegistryBuilder.applySetting( Environment.TRANSACTION_STRATEGY, JdbcTransactionFactory.class ); - } - } - } - private String jaccContextId; private void addJaccDefinition(String key, Object value) { @@ -522,35 +406,6 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil } } - @SuppressWarnings("unchecked") - private Class loadSessionInterceptorClass( - Object value, - BootstrapServiceRegistry bootstrapServiceRegistry) { - if ( value == null ) { - return null; - } - - Class theClass; - if ( Class.class.isInstance( value ) ) { - theClass = (Class) value; - } - else { - theClass = bootstrapServiceRegistry.getService( ClassLoaderService.class ).classForName( value.toString() ); - } - - try { - return (Class) theClass; - } - catch (ClassCastException e) { - throw persistenceException( - String.format( - "Specified Interceptor implementation class [%s] was not castable to Interceptor", - theClass.getName() - ) - ); - } - } - private void addCacheRegionDefinition(String role, String value, CacheRegionDefinition.CacheRegionType cacheType) { final StringTokenizer params = new StringTokenizer( value, ";, " ); if ( !params.hasMoreTokens() ) { @@ -592,66 +447,6 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil cacheRegionDefinitions.add( def ); } - @SuppressWarnings("unchecked") - private T instantiateCustomClassFromConfiguration( - Object value, - Class type, - ServiceRegistry bootstrapServiceRegistry) { - if ( value == null ) { - return null; - } - - if ( type.isInstance( value ) ) { - return (T) value; - } - - final Class implementationClass; - - if ( Class.class.isInstance( value ) ) { - try { - implementationClass = (Class) value; - } - catch (ClassCastException e) { - throw persistenceException( - String.format( - "Specified implementation class [%s] was not of expected type [%s]", - ((Class) value).getName(), - type.getName() - ) - ); - } - } - else { - final String implementationClassName = value.toString(); - try { - implementationClass = bootstrapServiceRegistry.getService( ClassLoaderService.class ) - .classForName( implementationClassName ); - } - catch (ClassCastException e) { - throw persistenceException( - String.format( - "Specified implementation class [%s] was not of expected type [%s]", - implementationClassName, - type.getName() - ) - ); - } - } - - try { - return implementationClass.newInstance(); - } - catch (Exception e) { - throw persistenceException( - String.format( - "Unable to instantiate specified implementation class [%s]", - implementationClass.getName() - ), - e - ); - } - } - @SuppressWarnings("unchecked") private ScanResult scan(BootstrapServiceRegistry bootstrapServiceRegistry) { Scanner scanner = locateOrBuildScanner( bootstrapServiceRegistry ); @@ -799,6 +594,23 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil } } + @Override + public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) { + 
this.validatorFactory = validatorFactory; + + if ( validatorFactory != null ) { + BeanValidationIntegrator.validateFactory( validatorFactory ); + } + return this; + } + + @Override + public EntityManagerFactoryBuilder withDataSource(DataSource dataSource) { + this.dataSource = dataSource; + + return this; + } + @Override public void cancel() { // todo : close the bootstrap registry (not critical, but nice to do) @@ -806,14 +618,16 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil } @SuppressWarnings("unchecked") - public EntityManagerFactory buildEntityManagerFactory() { - // IMPL NOTE : TCCL handling here is temporary. - // It is needed because this code still uses Hibernate Configuration and Hibernate commons-annotations - // in turn which relies on TCCL being set. + public EntityManagerFactory build() { + processProperties(); final ServiceRegistry serviceRegistry = buildServiceRegistry(); final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class ); + // IMPL NOTE : TCCL handling here is temporary. + // It is needed because this code still uses Hibernate Configuration and Hibernate commons-annotations + // in turn which relies on TCCL being set. + return ( (ClassLoaderServiceImpl) classLoaderService ).withTccl( new ClassLoaderServiceImpl.Work() { @Override @@ -848,6 +662,139 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil ); } + private void processProperties() { + applyJdbcConnectionProperties(); + applyTransactionProperties(); + + Object validationFactory = this.validatorFactory; + if ( validationFactory == null ) { + validationFactory = configurationValues.get( AvailableSettings.VALIDATION_FACTORY ); + } + if ( validationFactory != null ) { + BeanValidationIntegrator.validateFactory( validationFactory ); + serviceRegistryBuilder.applySetting( AvailableSettings.VALIDATION_FACTORY, validationFactory ); + } + + // flush before completion validation + if ( "true".equals( configurationValues.get( Environment.FLUSH_BEFORE_COMPLETION ) ) ) { + serviceRegistryBuilder.applySetting( Environment.FLUSH_BEFORE_COMPLETION, "false" ); + LOG.definingFlushBeforeCompletionIgnoredInHem( Environment.FLUSH_BEFORE_COMPLETION ); + } + + final StrategySelector strategySelector = serviceRegistryBuilder.getBootstrapServiceRegistry().getService( StrategySelector.class ); + + for ( Object oEntry : configurationValues.entrySet() ) { + Map.Entry entry = (Map.Entry) oEntry; + if ( entry.getKey() instanceof String ) { + final String keyString = (String) entry.getKey(); + + if ( AvailableSettings.INTERCEPTOR.equals( keyString ) ) { + sessionFactoryInterceptor = strategySelector.resolveStrategy( Interceptor.class, entry.getValue() ); + } + else if ( AvailableSettings.SESSION_INTERCEPTOR.equals( keyString ) ) { + settings.setSessionInterceptorClass( + loadSessionInterceptorClass( entry.getValue(), strategySelector ) + ); + } + else if ( AvailableSettings.NAMING_STRATEGY.equals( keyString ) ) { + namingStrategy = strategySelector.resolveStrategy( NamingStrategy.class, entry.getValue() ); + } + else if ( AvailableSettings.SESSION_FACTORY_OBSERVER.equals( keyString ) ) { + suppliedSessionFactoryObserver = strategySelector.resolveStrategy( SessionFactoryObserver.class, entry.getValue() ); + } + else if ( AvailableSettings.DISCARD_PC_ON_CLOSE.equals( keyString ) ) { + settings.setReleaseResourcesOnCloseEnabled( "true".equals( entry.getValue() ) ); + } + else if ( keyString.startsWith( 
AvailableSettings.CLASS_CACHE_PREFIX ) ) { + addCacheRegionDefinition( + keyString.substring( AvailableSettings.CLASS_CACHE_PREFIX.length() + 1 ), + (String) entry.getValue(), + CacheRegionDefinition.CacheRegionType.ENTITY + ); + } + else if ( keyString.startsWith( AvailableSettings.COLLECTION_CACHE_PREFIX ) ) { + addCacheRegionDefinition( + keyString.substring( AvailableSettings.COLLECTION_CACHE_PREFIX.length() + 1 ), + (String) entry.getValue(), + CacheRegionDefinition.CacheRegionType.COLLECTION + ); + } + else if ( keyString.startsWith( AvailableSettings.JACC_PREFIX ) + && ! ( keyString.equals( AvailableSettings.JACC_CONTEXT_ID ) + || keyString.equals( AvailableSettings.JACC_ENABLED ) ) ) { + addJaccDefinition( (String) entry.getKey(), entry.getValue() ); + } + } + } + } + + private void applyJdbcConnectionProperties() { + if ( dataSource != null ) { + serviceRegistryBuilder.applySetting( Environment.DATASOURCE, dataSource ); + } + else if ( persistenceUnit.getJtaDataSource() != null ) { + serviceRegistryBuilder.applySetting( Environment.DATASOURCE, persistenceUnit.getJtaDataSource() ); + } + else if ( persistenceUnit.getNonJtaDataSource() != null ) { + serviceRegistryBuilder.applySetting( Environment.DATASOURCE, persistenceUnit.getNonJtaDataSource() ); + } + else { + final String driver = (String) configurationValues.get( AvailableSettings.JDBC_DRIVER ); + if ( StringHelper.isNotEmpty( driver ) ) { + serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.DRIVER, driver ); + } + final String url = (String) configurationValues.get( AvailableSettings.JDBC_URL ); + if ( StringHelper.isNotEmpty( url ) ) { + serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.URL, url ); + } + final String user = (String) configurationValues.get( AvailableSettings.JDBC_USER ); + if ( StringHelper.isNotEmpty( user ) ) { + serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.USER, user ); + } + final String pass = (String) configurationValues.get( AvailableSettings.JDBC_PASSWORD ); + if ( StringHelper.isNotEmpty( pass ) ) { + serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.PASS, pass ); + } + } + } + + private void applyTransactionProperties() { + PersistenceUnitTransactionType txnType = PersistenceUnitTransactionTypeHelper.interpretTransactionType( + configurationValues.get( AvailableSettings.TRANSACTION_TYPE ) + ); + if ( txnType == null ) { + txnType = persistenceUnit.getTransactionType(); + } + if ( txnType == null ) { + // is it more appropriate to have this be based on bootstrap entry point (EE vs SE)? + txnType = PersistenceUnitTransactionType.RESOURCE_LOCAL; + } + settings.setTransactionType( txnType ); + boolean hasTxStrategy = configurationValues.containsKey( Environment.TRANSACTION_STRATEGY ); + if ( hasTxStrategy ) { + LOG.overridingTransactionStrategyDangerous( Environment.TRANSACTION_STRATEGY ); + } + else { + if ( txnType == PersistenceUnitTransactionType.JTA ) { + serviceRegistryBuilder.applySetting( Environment.TRANSACTION_STRATEGY, CMTTransactionFactory.class ); + } + else if ( txnType == PersistenceUnitTransactionType.RESOURCE_LOCAL ) { + serviceRegistryBuilder.applySetting( Environment.TRANSACTION_STRATEGY, JdbcTransactionFactory.class ); + } + } + } + + @SuppressWarnings("unchecked") + private Class loadSessionInterceptorClass(Object value, StrategySelector strategySelector) { + if ( value == null ) { + return null; + } + + return Class.class.isInstance( value ) + ? 
(Class) value + : strategySelector.selectStrategyImplementor( Interceptor.class, value.toString() ); + } + public ServiceRegistry buildServiceRegistry() { return serviceRegistryBuilder.build(); } @@ -867,11 +814,12 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil cfg.setInterceptor( sessionFactoryInterceptor ); } - final IdentifierGeneratorStrategyProvider strategyProvider = instantiateCustomClassFromConfiguration( - props.get( AvailableSettings.IDENTIFIER_GENERATOR_STRATEGY_PROVIDER ), - IdentifierGeneratorStrategyProvider.class, - serviceRegistry - ); + final Object strategyProviderValue = props.get( AvailableSettings.IDENTIFIER_GENERATOR_STRATEGY_PROVIDER ); + final IdentifierGeneratorStrategyProvider strategyProvider = strategyProviderValue == null + ? null + : serviceRegistry.getService( StrategySelector.class ) + .resolveStrategy( IdentifierGeneratorStrategyProvider.class, strategyProviderValue ); + if ( strategyProvider != null ) { final MutableIdentifierGeneratorFactory identifierGeneratorFactory = cfg.getIdentifierGeneratorFactory(); for ( Map.Entry> entry : strategyProvider.getStrategies().entrySet() ) { diff --git a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderUsingMetamodelImpl.java b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderUsingMetamodelImpl.java deleted file mode 100644 index ae0b7ada68..0000000000 --- a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderUsingMetamodelImpl.java +++ /dev/null @@ -1,661 +0,0 @@ -/* - * Hibernate, Relational Persistence for Idiomatic Java - * - * Copyright (c) 2012, Red Hat Inc. or third-party contributors as - * indicated by the @author tags or express copyright attribution - * statements applied by the authors. All third-party contributions are - * distributed under license by Red Hat Inc. - * - * This copyrighted material is made available to anyone wishing to use, modify, - * copy, or redistribute it subject to the terms and conditions of the GNU - * Lesser General Public License, as published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License - * for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this distribution; if not, write to: - * Free Software Foundation, Inc. 
- * 51 Franklin Street, Fifth Floor - * Boston, MA 02110-1301 USA - */ -package org.hibernate.jpa.boot.internal; - -import javax.persistence.EntityManagerFactory; -import javax.persistence.EntityNotFoundException; -import javax.persistence.PersistenceException; -import javax.persistence.SharedCacheMode; -import javax.persistence.spi.PersistenceUnitTransactionType; -import java.io.File; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.StringTokenizer; - -import org.jboss.logging.Logger; - -import org.hibernate.CustomEntityDirtinessStrategy; -import org.hibernate.EntityNameResolver; -import org.hibernate.Interceptor; -import org.hibernate.SessionFactory; -import org.hibernate.SessionFactoryObserver; -import org.hibernate.boot.registry.BootstrapServiceRegistry; -import org.hibernate.boot.registry.StandardServiceRegistryBuilder; -import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; -import org.hibernate.boot.spi.CacheRegionDefinition; -import org.hibernate.boot.spi.JaccDefinition; -import org.hibernate.cache.spi.access.AccessType; -import org.hibernate.cfg.Environment; -import org.hibernate.cfg.MetadataSourceType; -import org.hibernate.cfg.NamingStrategy; -import org.hibernate.cfg.beanvalidation.BeanValidationIntegrator; -import org.hibernate.context.spi.CurrentTenantIdentifierResolver; -import org.hibernate.engine.spi.SessionFactoryImplementor; -import org.hibernate.engine.transaction.internal.jdbc.JdbcTransactionFactory; -import org.hibernate.engine.transaction.internal.jta.CMTTransactionFactory; -import org.hibernate.internal.util.StringHelper; -import org.hibernate.internal.util.config.ConfigurationHelper; -import org.hibernate.jpa.AvailableSettings; -import org.hibernate.jpa.boot.spi.EntityManagerFactoryBuilder; -import org.hibernate.jpa.boot.spi.JpaUnifiedSettingsBuilder; -import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor; -import org.hibernate.jpa.boot.spi.Settings; -import org.hibernate.jpa.internal.EntityManagerFactoryImpl; -import org.hibernate.jpa.internal.EntityManagerMessageLogger; -import org.hibernate.jpa.internal.util.LogHelper; -import org.hibernate.jpa.internal.util.PersistenceUnitTransactionTypeHelper; -import org.hibernate.jpa.internal.util.SharedCacheModeHelper; -import org.hibernate.metamodel.Metadata; -import org.hibernate.metamodel.MetadataBuilder; -import org.hibernate.metamodel.MetadataSourceProcessingOrder; -import org.hibernate.metamodel.MetadataSources; -import org.hibernate.metamodel.SessionFactoryBuilder; -import org.hibernate.proxy.EntityNotFoundDelegate; -import org.hibernate.service.ServiceRegistry; -import org.hibernate.service.spi.ServiceRegistryImplementor; - -import static org.hibernate.jaxb.spi.cfg.JaxbHibernateConfiguration.JaxbSessionFactory.JaxbMapping; -import static org.hibernate.jpa.boot.spi.JpaBootstrapServiceRegistryBuilder.buildBootstrapServiceRegistry; -import static org.hibernate.jpa.boot.spi.JpaUnifiedSettingsBuilder.CfgXmlMappingArtifacts; - -/** - * This will eventually replace {@link EntityManagerFactoryBuilderImpl} - * - * @author Steve Ebersole - * - * @deprecated This class will go away before 5.0 even goes alpha and its functionality will replace that in - * {@link EntityManagerFactoryBuilderImpl}. 
- */ -@Deprecated -public class EntityManagerFactoryBuilderUsingMetamodelImpl implements EntityManagerFactoryBuilder { - private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger( - EntityManagerMessageLogger.class, - EntityManagerFactoryBuilderImpl.class.getName() - ); - - private final PersistenceUnitDescriptor persistenceUnit; - private final Map configurationValues; - - private final BootstrapServiceRegistry bootstrapServiceRegistry; - private final MetadataSources metadataSources; - - private final List jaccDefinitions = new ArrayList(); // todo : see HHH-7462 - private final List cacheRegionDefinitions = new ArrayList(); - - public EntityManagerFactoryBuilderUsingMetamodelImpl( - PersistenceUnitDescriptor persistenceUnit, - Map integrationSettings) { - LogHelper.logPersistenceUnitInformation( persistenceUnit ); - - this.persistenceUnit = persistenceUnit; - if ( integrationSettings == null ) { - integrationSettings = Collections.emptyMap(); - } - - // build the boot-strap service registry, which mainly handles class loader interactions - this.bootstrapServiceRegistry = buildBootstrapServiceRegistry( - persistenceUnit, - integrationSettings - ); - - final JpaUnifiedSettingsBuilder.Result mergedResult = JpaUnifiedSettingsBuilder.mergePropertySources( - persistenceUnit, - integrationSettings, - bootstrapServiceRegistry - ); - - final CfgXmlMappingArtifacts cfgXmlMappingArtifacts = mergedResult.getCfgXmlMappingArtifacts(); - this.configurationValues = mergedResult.getSettings(); - - // todo : add scanning... - - this.metadataSources = new MetadataSources( bootstrapServiceRegistry ); - for ( JaxbMapping jaxbMapping : cfgXmlMappingArtifacts.getMappings() ) { - if ( jaxbMapping.getClazz() != null ) { - metadataSources.addAnnotatedClassName( jaxbMapping.getClazz() ); - } - else if ( jaxbMapping.getResource() != null ) { - metadataSources.addResource( jaxbMapping.getResource() ); - } - else if ( jaxbMapping.getJar() != null ) { - metadataSources.addJar( new File( jaxbMapping.getJar() ) ); - } - else if ( jaxbMapping.getPackage() != null ) { - metadataSources.addPackage( jaxbMapping.getPackage() ); - } - } - - // todo : add results of scanning to the MetadataSources - - metadataSources.addCacheRegionDefinitions( cacheRegionDefinitions ); - } - - @Override - public void cancel() { - // currently nothing to do... 
- } - - @Override - public EntityManagerFactory buildEntityManagerFactory() { - final StandardServiceRegistryBuilder serviceRegistryBuilder = new StandardServiceRegistryBuilder( bootstrapServiceRegistry ); - final SpecialProperties specialProperties = processProperties( serviceRegistryBuilder ); - - final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder(); - prepareMetadataBuilder( metadataBuilder, specialProperties ); - final Metadata metadata = metadataBuilder.build(); - - final SessionFactoryBuilder sessionFactoryBuilder = metadata.getSessionFactoryBuilder(); - prepareSessionFactoryBuilder( sessionFactoryBuilder, specialProperties ); - sessionFactoryBuilder.add( new ServiceRegistryCloser() ); - SessionFactoryImplementor sessionFactory = (SessionFactoryImplementor) sessionFactoryBuilder.build(); - - final Settings emfCreationSettings = prepareEntitytManagerFactoryCreationSettings( specialProperties ); - - // IMPL NOTE : the last param (passed as null) is the Configuration which we pass in at the moment solely to - // get access to the mapping information in order to build the JPA javax.persistence.metamodel.Metamodel - // We need to change that to leverage the new Hibernate Metadata metamodel package anyway.. - - return new EntityManagerFactoryImpl( - persistenceUnit.getName(), - sessionFactory, - (SettingsImpl) emfCreationSettings, - configurationValues, - null - ); - } - - private SpecialProperties processProperties(StandardServiceRegistryBuilder serviceRegistryBuilder) { - final SpecialProperties specialProperties = new SpecialProperties(); - - applyJdbcConnectionProperties( serviceRegistryBuilder ); - applyTransactionProperties( serviceRegistryBuilder, specialProperties ); - - final Object validationFactory = configurationValues.get( AvailableSettings.VALIDATION_FACTORY ); - if ( validationFactory != null ) { - BeanValidationIntegrator.validateFactory( validationFactory ); - } - - // flush before completion validation - if ( "true".equals( configurationValues.get( Environment.FLUSH_BEFORE_COMPLETION ) ) ) { - serviceRegistryBuilder.applySetting( Environment.FLUSH_BEFORE_COMPLETION, "false" ); - LOG.definingFlushBeforeCompletionIgnoredInHem( Environment.FLUSH_BEFORE_COMPLETION ); - } - - for ( Map.Entry entry : configurationValues.entrySet() ) { - if ( entry.getKey() instanceof String ) { - final String keyString = (String) entry.getKey(); - - //noinspection deprecation - if ( AvailableSettings.INTERCEPTOR.equals( keyString ) - || org.hibernate.cfg.AvailableSettings.INTERCEPTOR.equals( keyString ) ) { - specialProperties.sessionFactoryInterceptor = instantiateCustomClassFromConfiguration( - entry.getValue(), - Interceptor.class, - bootstrapServiceRegistry - ); - } - else if ( AvailableSettings.SESSION_INTERCEPTOR.equals( keyString ) ) { - specialProperties.sessionInterceptorClass = loadSessionInterceptorClass( - entry.getValue(), - bootstrapServiceRegistry - ); - } - else if ( AvailableSettings.NAMING_STRATEGY.equals( keyString ) ) { - specialProperties.namingStrategy = instantiateCustomClassFromConfiguration( - entry.getValue(), - NamingStrategy.class, - bootstrapServiceRegistry - ); - } - else if ( AvailableSettings.SESSION_FACTORY_OBSERVER.equals( keyString ) ) { - specialProperties.sessionFactoryObserver = instantiateCustomClassFromConfiguration( - entry.getValue(), - SessionFactoryObserver.class, - bootstrapServiceRegistry - ); - } - else if ( org.hibernate.cfg.AvailableSettings.CUSTOM_ENTITY_DIRTINESS_STRATEGY.equals( keyString ) ) { - 
specialProperties.customEntityDirtinessStrategy = instantiateCustomClassFromConfiguration( - entry.getValue(), - CustomEntityDirtinessStrategy.class, - bootstrapServiceRegistry - ); - } - else if ( org.hibernate.cfg.AvailableSettings.MULTI_TENANT_IDENTIFIER_RESOLVER.equals( keyString ) ) { - specialProperties.currentTenantIdentifierResolver = instantiateCustomClassFromConfiguration( - entry.getValue(), - CurrentTenantIdentifierResolver.class, - bootstrapServiceRegistry - ); - } - else if ( AvailableSettings.DISCARD_PC_ON_CLOSE.equals( keyString ) ) { - specialProperties.releaseResourcesOnClose = ( "true".equals( entry.getValue() ) ); - } - else if ( AvailableSettings.SHARED_CACHE_MODE.equals( keyString ) ) { - specialProperties.sharedCacheMode = SharedCacheModeHelper.asSharedCacheMode( entry.getValue() ); - } - else if ( org.hibernate.cfg.AvailableSettings.METADATA_PROCESSING_ORDER.equals( keyString ) ) { - specialProperties.sourceProcessingOrder = interpretSourceProcessingOrder( entry.getValue() ); - } - else if ( org.hibernate.cfg.AvailableSettings.DEFAULT_CACHE_CONCURRENCY_STRATEGY.equals( keyString ) ) { - specialProperties.defaultCacheAccessType = interpretCacheAccessStrategy( entry.getValue() ); - } - else if ( org.hibernate.cfg.AvailableSettings.USE_NEW_ID_GENERATOR_MAPPINGS.equals( keyString ) ) { - specialProperties.useEnhancedGenerators = ConfigurationHelper.asBoolean( entry.getValue() ); - } - else if ( keyString.startsWith( AvailableSettings.CLASS_CACHE_PREFIX ) ) { - addCacheRegionDefinition( - keyString.substring( AvailableSettings.CLASS_CACHE_PREFIX.length() + 1 ), - (String) entry.getValue(), - CacheRegionDefinition.CacheRegionType.ENTITY - ); - } - else if ( keyString.startsWith( AvailableSettings.COLLECTION_CACHE_PREFIX ) ) { - addCacheRegionDefinition( - keyString.substring( AvailableSettings.COLLECTION_CACHE_PREFIX.length() + 1 ), - (String) entry.getValue(), - CacheRegionDefinition.CacheRegionType.COLLECTION - ); - } - else if ( keyString.startsWith( AvailableSettings.JACC_PREFIX ) - && ! 
( keyString.equals( AvailableSettings.JACC_CONTEXT_ID ) - || keyString.equals( AvailableSettings.JACC_ENABLED ) ) ) { - addJaccDefinition( (String) entry.getKey(), entry.getValue() ); - } - } - - } - - return specialProperties; - } - - private void applyJdbcConnectionProperties(StandardServiceRegistryBuilder serviceRegistryBuilder) { - if ( persistenceUnit.getJtaDataSource() != null ) { - serviceRegistryBuilder.applySetting( Environment.DATASOURCE, persistenceUnit.getJtaDataSource() ); - } - else if ( persistenceUnit.getNonJtaDataSource() != null ) { - serviceRegistryBuilder.applySetting( Environment.DATASOURCE, persistenceUnit.getNonJtaDataSource() ); - } - else { - final String driver = (String) configurationValues.get( AvailableSettings.JDBC_DRIVER ); - if ( StringHelper.isNotEmpty( driver ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.DRIVER, driver ); - } - final String url = (String) configurationValues.get( AvailableSettings.JDBC_URL ); - if ( StringHelper.isNotEmpty( url ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.URL, url ); - } - final String user = (String) configurationValues.get( AvailableSettings.JDBC_USER ); - if ( StringHelper.isNotEmpty( user ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.USER, user ); - } - final String pass = (String) configurationValues.get( AvailableSettings.JDBC_PASSWORD ); - if ( StringHelper.isNotEmpty( pass ) ) { - serviceRegistryBuilder.applySetting( org.hibernate.cfg.AvailableSettings.PASS, pass ); - } - } - } - - private void applyTransactionProperties(StandardServiceRegistryBuilder serviceRegistryBuilder, SpecialProperties specialProperties) { - PersistenceUnitTransactionType txnType = PersistenceUnitTransactionTypeHelper.interpretTransactionType( - configurationValues.get( AvailableSettings.TRANSACTION_TYPE ) - ); - if ( txnType == null ) { - txnType = persistenceUnit.getTransactionType(); - } - if ( txnType == null ) { - // is it more appropriate to have this be based on bootstrap entry point (EE vs SE)? 
- txnType = PersistenceUnitTransactionType.RESOURCE_LOCAL; - } - specialProperties.jpaTransactionType = txnType; - boolean hasTxStrategy = configurationValues.containsKey( Environment.TRANSACTION_STRATEGY ); - if ( hasTxStrategy ) { - LOG.overridingTransactionStrategyDangerous( Environment.TRANSACTION_STRATEGY ); - } - else { - if ( txnType == PersistenceUnitTransactionType.JTA ) { - serviceRegistryBuilder.applySetting( Environment.TRANSACTION_STRATEGY, CMTTransactionFactory.class ); - } - else if ( txnType == PersistenceUnitTransactionType.RESOURCE_LOCAL ) { - serviceRegistryBuilder.applySetting( Environment.TRANSACTION_STRATEGY, JdbcTransactionFactory.class ); - } - } - } - - @SuppressWarnings("unchecked") - private T instantiateCustomClassFromConfiguration( - Object value, - Class type, - ServiceRegistry bootstrapServiceRegistry) { - if ( value == null ) { - return null; - } - - if ( type.isInstance( value ) ) { - return (T) value; - } - - final Class implementationClass; - - if ( Class.class.isInstance( value ) ) { - try { - implementationClass = (Class) value; - } - catch (ClassCastException e) { - throw persistenceException( - String.format( - "Specified implementation class [%s] was not of expected type [%s]", - ((Class) value).getName(), - type.getName() - ) - ); - } - } - else { - final String implementationClassName = value.toString(); - try { - implementationClass = bootstrapServiceRegistry.getService( ClassLoaderService.class ) - .classForName( implementationClassName ); - } - catch (ClassCastException e) { - throw persistenceException( - String.format( - "Specified implementation class [%s] was not of expected type [%s]", - implementationClassName, - type.getName() - ) - ); - } - } - - try { - return implementationClass.newInstance(); - } - catch (Exception e) { - throw persistenceException( - String.format( - "Unable to instantiate specified implementation class [%s]", - implementationClass.getName() - ), - e - ); - } - } - - @SuppressWarnings("unchecked") - private Class loadSessionInterceptorClass( - Object value, - BootstrapServiceRegistry bootstrapServiceRegistry) { - if ( value == null ) { - return null; - } - - Class theClass; - if ( Class.class.isInstance( value ) ) { - theClass = (Class) value; - } - else { - theClass = bootstrapServiceRegistry.getService( ClassLoaderService.class ).classForName( value.toString() ); - } - - try { - return (Class) theClass; - } - catch (ClassCastException e) { - throw persistenceException( - String.format( - "Specified Interceptor implementation class [%s] was not castable to Interceptor", - theClass.getName() - ) - ); - } - } - - private MetadataSourceProcessingOrder interpretSourceProcessingOrder(Object value) { - if ( value == null ) { - return null; - } - - if ( MetadataSourceProcessingOrder.class.isInstance( value ) ) { - return (MetadataSourceProcessingOrder) value; - } - else { - final String s = value.toString(); - final StringTokenizer tokenizer = new StringTokenizer( s, ",; ", false ); - final MetadataSourceType metadataSourceType = MetadataSourceType.parsePrecedence( tokenizer.nextToken() ); - return metadataSourceType == MetadataSourceType.CLASS - ? 
MetadataSourceProcessingOrder.ANNOTATIONS_FIRST - : MetadataSourceProcessingOrder.HBM_FIRST; - } - } - - private AccessType interpretCacheAccessStrategy(Object value) { - if ( value == null ) { - return null; - } - - if ( AccessType.class.isInstance( value ) ) { - return (AccessType) value; - } - else { - return AccessType.fromExternalName( value.toString() ); - } - } - - private String jaccContextId; - - private void addJaccDefinition(String key, Object value) { - if ( jaccContextId == null ) { - jaccContextId = (String) configurationValues.get( AvailableSettings.JACC_CONTEXT_ID ); - if ( jaccContextId == null ) { - throw persistenceException( - "Entities have been configured for JACC, but " - + AvailableSettings.JACC_CONTEXT_ID + " has not been set" - ); - } - } - - try { - final int roleStart = AvailableSettings.JACC_PREFIX.length() + 1; - final String role = key.substring( roleStart, key.indexOf( '.', roleStart ) ); - final int classStart = roleStart + role.length() + 1; - final String clazz = key.substring( classStart, key.length() ); - - final JaccDefinition def = new JaccDefinition( jaccContextId, role, clazz, (String) value ); - - jaccDefinitions.add( def ); - - } - catch ( IndexOutOfBoundsException e ) { - throw persistenceException( "Illegal usage of " + AvailableSettings.JACC_PREFIX + ": " + key ); - } - } - - private void addCacheRegionDefinition(String role, String value, CacheRegionDefinition.CacheRegionType cacheType) { - final StringTokenizer params = new StringTokenizer( value, ";, " ); - if ( !params.hasMoreTokens() ) { - StringBuilder error = new StringBuilder( "Illegal usage of " ); - if ( cacheType == CacheRegionDefinition.CacheRegionType.ENTITY ) { - error.append( AvailableSettings.CLASS_CACHE_PREFIX ) - .append( ": " ) - .append( AvailableSettings.CLASS_CACHE_PREFIX ); - } - else { - error.append( AvailableSettings.COLLECTION_CACHE_PREFIX ) - .append( ": " ) - .append( AvailableSettings.COLLECTION_CACHE_PREFIX ); - } - error.append( '.' ) - .append( role ) - .append( ' ' ) - .append( value ) - .append( ". 
Was expecting configuration, but found none" ); - throw persistenceException( error.toString() ); - } - - String usage = params.nextToken(); - String region = null; - if ( params.hasMoreTokens() ) { - region = params.nextToken(); - } - boolean lazyProperty = true; - if ( cacheType == CacheRegionDefinition.CacheRegionType.ENTITY ) { - if ( params.hasMoreTokens() ) { - lazyProperty = "all".equalsIgnoreCase( params.nextToken() ); - } - } - else { - lazyProperty = false; - } - - final CacheRegionDefinition def = new CacheRegionDefinition( cacheType, role, usage, region, lazyProperty ); - cacheRegionDefinitions.add( def ); - } - - @SuppressWarnings("UnnecessaryUnboxing") - private void prepareMetadataBuilder( - MetadataBuilder metadataBuilder, - SpecialProperties specialProperties) { - if ( specialProperties.namingStrategy != null ) { - metadataBuilder.with( specialProperties.namingStrategy ); - } - - if ( specialProperties.sourceProcessingOrder != null ) { - metadataBuilder.with( specialProperties.sourceProcessingOrder ); - } - - if ( specialProperties.useEnhancedGenerators != null ) { - metadataBuilder.withNewIdentifierGeneratorsEnabled( specialProperties.useEnhancedGenerators.booleanValue() ); - } - - if ( specialProperties.sharedCacheMode != null ) { - metadataBuilder.with( specialProperties.sharedCacheMode ); - } - - if ( specialProperties.defaultCacheAccessType != null ) { - metadataBuilder.with( specialProperties.defaultCacheAccessType ); - } - } - - private void prepareSessionFactoryBuilder(SessionFactoryBuilder builder, SpecialProperties specialProperties) { - if ( specialProperties.sessionFactoryInterceptor != null ) { - builder.with( specialProperties.sessionFactoryInterceptor ); - } - if ( specialProperties.entityNameResolver != null ) { - builder.add( specialProperties.entityNameResolver ); - } - if ( specialProperties.entityNotFoundDelegate != null ) { - builder.with( specialProperties.entityNotFoundDelegate ); - } - if ( specialProperties.sessionFactoryObserver != null ) { - builder.add( specialProperties.sessionFactoryObserver ); - } - if ( specialProperties.customEntityDirtinessStrategy != null ) { - builder.with( specialProperties.customEntityDirtinessStrategy ); - } - if ( specialProperties.currentTenantIdentifierResolver != null ) { - builder.with( specialProperties.currentTenantIdentifierResolver ); - } - } - - @SuppressWarnings("UnnecessaryUnboxing") - private Settings prepareEntitytManagerFactoryCreationSettings(SpecialProperties specialProperties) { - final SettingsImpl settings = new SettingsImpl(); - if ( specialProperties.releaseResourcesOnClose != null ) { - settings.setReleaseResourcesOnCloseEnabled( specialProperties.releaseResourcesOnClose.booleanValue() ); - } - if ( specialProperties.sessionInterceptorClass != null ) { - settings.setSessionInterceptorClass( specialProperties.sessionInterceptorClass ); - } - if ( specialProperties.jpaTransactionType != null ) { - settings.setTransactionType( specialProperties.jpaTransactionType ); - } - return settings; - } - - - private PersistenceException persistenceException(String message) { - return persistenceException( message, null ); - } - - private PersistenceException persistenceException(String message, Exception cause) { - return new PersistenceException( - getExceptionHeader() + message, - cause - ); - } - - private String getExceptionHeader() { - return "[PersistenceUnit: " + persistenceUnit.getName() + "] "; - } - - /** - * Aggregated return structure - */ - private static class SpecialProperties { - // affecting 
MetadataBuilder... - private NamingStrategy namingStrategy; - private MetadataSourceProcessingOrder sourceProcessingOrder; - private SharedCacheMode sharedCacheMode; - private AccessType defaultCacheAccessType; - private Boolean useEnhancedGenerators; - - // affecting SessionFactoryBuilder... - private Interceptor sessionFactoryInterceptor; - private SessionFactoryObserver sessionFactoryObserver; - private EntityNameResolver entityNameResolver; - private EntityNotFoundDelegate entityNotFoundDelegate = new JpaEntityNotFoundDelegate(); - private CustomEntityDirtinessStrategy customEntityDirtinessStrategy; - private CurrentTenantIdentifierResolver currentTenantIdentifierResolver; - - // affecting EntityManagerFactory building - private Boolean releaseResourcesOnClose; - private Class sessionInterceptorClass; - private PersistenceUnitTransactionType jpaTransactionType; - } - - private static class JpaEntityNotFoundDelegate implements EntityNotFoundDelegate, Serializable { - public void handleEntityNotFound(String entityName, Serializable id) { - throw new EntityNotFoundException( "Unable to find " + entityName + " with id " + id ); - } - } - - private static class ServiceRegistryCloser implements SessionFactoryObserver { - @Override - public void sessionFactoryCreated(SessionFactory sessionFactory) { - // nothing to do - } - - @Override - public void sessionFactoryClosed(SessionFactory sessionFactory) { - SessionFactoryImplementor sfi = ( (SessionFactoryImplementor) sessionFactory ); - sfi.getServiceRegistry().destroy(); - ServiceRegistry basicRegistry = sfi.getServiceRegistry().getParentServiceRegistry(); - ( (ServiceRegistryImplementor) basicRegistry ).destroy(); - } - } -} diff --git a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/Bootstrap.java b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/Bootstrap.java index 068f029a3d..5790901576 100644 --- a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/Bootstrap.java +++ b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/Bootstrap.java @@ -23,11 +23,11 @@ */ package org.hibernate.jpa.boot.spi; -import javax.persistence.spi.PersistenceUnitInfo; import java.util.Map; +import javax.persistence.spi.PersistenceUnitInfo; + import org.hibernate.jpa.boot.internal.EntityManagerFactoryBuilderImpl; -import org.hibernate.jpa.boot.internal.EntityManagerFactoryBuilderUsingMetamodelImpl; import org.hibernate.jpa.boot.internal.PersistenceUnitInfoDescriptor; /** @@ -74,19 +74,4 @@ public final class Bootstrap { Map integration) { return getEntityManagerFactoryBuilder( new PersistenceUnitInfoDescriptor( persistenceUnitInfo ), integration ); } - - /** - * Specifically builds and returns a EntityManagerFactoryBuilder that leverages the new metamodel codebase. - * Eventually this will be the normal operation of {@link #getEntityManagerFactoryBuilder(PersistenceUnitDescriptor, Map)}, - * but for now due to the incompleteness of the metamodel codebase, this is not integrated as the main way to - * build the EntityManagerFactoryBuilder. This allows tests in the nor-core modules to keep running. 
- * - * @deprecated This is a temporary method until metamodel codebase is more complete - */ - @Deprecated - public static EntityManagerFactoryBuilder getEntityManagerFactoryBuilderUsingMetamodel( - PersistenceUnitDescriptor persistenceUnitDescriptor, - Map integration) { - return new EntityManagerFactoryBuilderUsingMetamodelImpl( persistenceUnitDescriptor, integration ); - } } diff --git a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/EntityManagerFactoryBuilder.java b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/EntityManagerFactoryBuilder.java index 7975390866..abd50f6a94 100644 --- a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/EntityManagerFactoryBuilder.java +++ b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/boot/spi/EntityManagerFactoryBuilder.java @@ -24,8 +24,7 @@ package org.hibernate.jpa.boot.spi; import javax.persistence.EntityManagerFactory; - -import org.hibernate.cfg.Configuration; +import javax.sql.DataSource; /** * Represents a 2-phase JPA bootstrap process for building a Hibernate EntityManagerFactory. @@ -33,7 +32,7 @@ import org.hibernate.cfg.Configuration; * The first phase is the process of instantiating this builder. During the first phase, loading of Class references * is highly discouraged. * - * The second phase is building the EntityManagerFactory instance via {@link #buildEntityManagerFactory()}. + * The second phase is building the EntityManagerFactory instance via {@link #build}. * * If anything goes wrong during either phase and the bootstrap process needs to be aborted, {@link #cancel()} should * be called. @@ -42,12 +41,32 @@ import org.hibernate.cfg.Configuration; * @author Scott Marlow */ public interface EntityManagerFactoryBuilder { + /** + * Allows passing in a Java EE ValidatorFactory (delayed from constructing the builder, AKA phase 2) to be used + * in building the EntityManagerFactory + * + * @param validatorFactory The ValidatorFactory + * + * @return {@code this}, for method chaining + */ + public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory); + + /** + * Allows passing in a DataSource (delayed from constructing the builder, AKA phase 2) to be used + * in building the EntityManagerFactory + * + * @param dataSource The DataSource to use + * + * @return {@code this}, for method chaining + */ + public EntityManagerFactoryBuilder withDataSource(DataSource dataSource); + /** * Build {@link EntityManagerFactory} instance * * @return The built {@link EntityManagerFactory} */ - public EntityManagerFactory buildEntityManagerFactory(); + public EntityManagerFactory build(); /** * Cancel the building processing. This is used to signal the builder to release any resources in the case of diff --git a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/criteria/expression/function/TrimFunction.java b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/criteria/expression/function/TrimFunction.java index a2fdc8ca49..489c23feaf 100644 --- a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/criteria/expression/function/TrimFunction.java +++ b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/criteria/expression/function/TrimFunction.java @@ -37,6 +37,7 @@ import org.hibernate.jpa.criteria.expression.LiteralExpression; * Models the ANSI SQL TRIM function. 
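
The `TrimFunction` change that continues below renders a literal trim character inline rather than binding it as a parameter, because, as the inline comment notes, a few dialects do not accept parameters as `trim()` arguments. For orientation, a criteria query that exercises this path might look like the fragment below; `em` stands in for an open `EntityManager`, and `Customer` is the existing test entity from `org.hibernate.jpa.test.metamodel`.

```java
// Fragment only; assumes an open EntityManager `em` and the Customer test entity
// with its String `name` attribute.
CriteriaBuilder cb = em.getCriteriaBuilder();
CriteriaQuery<String> query = cb.createQuery( String.class );
Root<Customer> root = query.from( Customer.class );

// cb.literal('.') yields a LiteralExpression, so the patched render() inlines the
// character -- trim(TRAILING '.' from ...) -- instead of binding it as a parameter.
query.select( cb.trim( CriteriaBuilder.Trimspec.TRAILING, cb.literal( '.' ), root.<String>get( "name" ) ) );
List<String> trimmed = em.createQuery( query ).getResultList();
```

With a non-literal trim character the previous behaviour is kept and the expression is rendered through `Renderable` as before.
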
* * @author Steve Ebersole + * @author Brett Meyer */ public class TrimFunction extends BasicFunctionExpression @@ -118,11 +119,22 @@ public class TrimFunction @Override public String render(RenderingContext renderingContext) { + String renderedTrimChar; + if ( trimCharacter.getClass().isAssignableFrom( + LiteralExpression.class ) ) { + // If the character is a literal, treat it as one. A few dialects + // do not support parameters as trim() arguments. + renderedTrimChar = ( ( LiteralExpression ) + trimCharacter ).getLiteral().toString(); + } else { + renderedTrimChar = ( (Renderable) trimCharacter ).render( + renderingContext ); + } return new StringBuilder() .append( "trim(" ) .append( trimspec.name() ) .append( ' ' ) - .append( ( (Renderable) trimCharacter ).render( renderingContext ) ) + .append( renderedTrimChar ) .append( " from " ) .append( ( (Renderable) trimSource ).render( renderingContext ) ) .append( ')' ) diff --git a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/event/spi/JpaIntegrator.java b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/event/spi/JpaIntegrator.java index df602b509c..1f9bd8c5a1 100644 --- a/hibernate-entitymanager/src/main/java/org/hibernate/jpa/event/spi/JpaIntegrator.java +++ b/hibernate-entitymanager/src/main/java/org/hibernate/jpa/event/spi/JpaIntegrator.java @@ -213,11 +213,13 @@ public class JpaIntegrator implements Integrator { } } + private static final String CDI_LISTENER_FACTORY_CLASS = "org.hibernate.jpa.event.internal.jpa.BeanManagerListenerFactory"; + private ListenerFactory buildBeanManagerListenerFactory(Object beanManagerRef) { try { // specifically using our classloader here... final Class beanManagerListenerFactoryClass = getClass().getClassLoader() - .loadClass( "org.hibernate.jpa.event.internal.jpa.BeanManagerListenerFactory" ); + .loadClass( CDI_LISTENER_FACTORY_CLASS ); final Method beanManagerListenerFactoryBuilderMethod = beanManagerListenerFactoryClass.getMethod( "fromBeanManagerReference", Object.class @@ -230,14 +232,14 @@ public class JpaIntegrator implements Integrator { throw e.getTargetException(); } } - catch (ReflectiveOperationException e) { - throw new HibernateException( "Could not access BeanManagerListenerFactory class to handle CDI extensions", e ); + catch (ClassNotFoundException e) { + throw new HibernateException( "Could not locate BeanManagerListenerFactory class to handle CDI extensions", e ); } - catch (RuntimeException e) { + catch (HibernateException e) { throw e; } catch (Throwable e) { - throw new HibernateException( "Problem calling BeanManagerListenerFactory class to handle CDI extensions", e ); + throw new HibernateException( "Could not access BeanManagerListenerFactory class to handle CDI extensions", e ); } } diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/BaseEntityManagerFunctionalTestCase.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/BaseEntityManagerFunctionalTestCase.java index 4fca26b4e3..1e971c640e 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/BaseEntityManagerFunctionalTestCase.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/BaseEntityManagerFunctionalTestCase.java @@ -93,7 +93,7 @@ public abstract class BaseEntityManagerFunctionalTestCase extends BaseUnitTestCa entityManagerFactory = (EntityManagerFactoryImpl) Bootstrap.getEntityManagerFactoryBuilder( buildPersistenceUnitDescriptor(), buildSettings() - ).buildEntityManagerFactory(); + ).build(); serviceRegistry = 
(StandardServiceRegistryImpl) entityManagerFactory.getSessionFactory() .getServiceRegistry() diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/TestingEntityManagerFactoryGenerator.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/TestingEntityManagerFactoryGenerator.java index 963d73bab3..0ecffdf0d8 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/TestingEntityManagerFactoryGenerator.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/TestingEntityManagerFactoryGenerator.java @@ -46,6 +46,6 @@ public class TestingEntityManagerFactoryGenerator { } public static EntityManagerFactory generateEntityManagerFactory(PersistenceUnitDescriptor descriptor, Map settings) { - return Bootstrap.getEntityManagerFactoryBuilder( descriptor, settings ).buildEntityManagerFactory(); + return Bootstrap.getEntityManagerFactoryBuilder( descriptor, settings ).build(); } } diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/cdi/NoCdiAvailableTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/cdi/NoCdiAvailableTest.java index 48b4cb9ddb..0ae009e8c9 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/cdi/NoCdiAvailableTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/cdi/NoCdiAvailableTest.java @@ -28,6 +28,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URL; +import org.hibernate.HibernateException; import org.hibernate.bytecode.spi.ByteCodeHelper; import org.junit.Rule; @@ -35,6 +36,7 @@ import org.junit.Test; import org.hibernate.testing.junit4.BaseUnitTestCase; import org.hibernate.testing.junit4.ClassLoadingIsolater; +import org.hibernate.testing.junit4.ExtraAssertions; import static org.junit.Assert.fail; @@ -125,14 +127,8 @@ public class NoCdiAvailableTest extends BaseUnitTestCase { mainMethod.invoke( null ); fail( "Expecting failure from missing CDI classes" ); } - catch (InvocationTargetException e) { - try { - throw e.getTargetException(); - } - catch (CdiClassLoadException expected) { - } -// catch (ClassCastException expectedAlt) { -// } + catch (InvocationTargetException expected) { + // hard to assert specific exception types due to classloader trickery } } } diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/Child.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/Child.java new file mode 100644 index 0000000000..2fb2142b6c --- /dev/null +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/Child.java @@ -0,0 +1,35 @@ +package org.hibernate.jpa.test.collection; + +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.ManyToOne; + +@Entity +public class Child { + + private Integer id; + private Parent daddy; + + public Child() { + + } + + @Id + public Integer getId() { + return id; + } + public void setId(Integer id) { + this.id = id; + } + @ManyToOne + public Parent getDaddy() { + return daddy; + } + public void setDaddy(Parent daddy) { + this.daddy = daddy; + } + + +} + + diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/Parent.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/Parent.java new file mode 100644 index 0000000000..ca0a5029ff --- /dev/null +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/Parent.java @@ -0,0 +1,50 @@ +package 
org.hibernate.jpa.test.collection; + +import java.util.HashSet; +import java.util.Set; + +import javax.persistence.CascadeType; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.PostLoad; +import javax.persistence.Transient; + +@Entity +public class Parent { + + private Integer id; + private Set children = new HashSet(); + private int nrOfChildren; + + public Parent() { + + } + + @Id + public Integer getId() { + return id; + } + public void setId(Integer id) { + this.id = id; + } + @OneToMany(mappedBy="daddy", fetch=FetchType.EAGER, cascade=CascadeType.ALL) + public Set getChildren() { + return children; + } + public void setChildren(Set children) { + this.children = children; + } + + @PostLoad + public void postLoad() { + nrOfChildren = children.size(); + } + + @Transient + public int getNrOfChildren() { + return nrOfChildren; + } +} + diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/PostLoadTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/PostLoadTest.java new file mode 100644 index 0000000000..da059f3059 --- /dev/null +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/collection/PostLoadTest.java @@ -0,0 +1,49 @@ +package org.hibernate.jpa.test.collection; + +import static org.junit.Assert.assertEquals; + +import java.util.HashSet; +import java.util.Set; + +import javax.persistence.EntityManager; + +import org.hibernate.jpa.test.BaseEntityManagerFunctionalTestCase; +import org.hibernate.testing.TestForIssue; +import org.junit.Test; + +@TestForIssue( jiraKey="HHH-6043" ) +public class PostLoadTest extends BaseEntityManagerFunctionalTestCase { + + /** + * Load an entity with a collection of associated entities, that uses a @PostLoad method to + * access the association. + */ + @Test + public void testAccessAssociatedSetInPostLoad() { + Child child = new Child(); + child.setId(1); + Parent daddy = new Parent(); + daddy.setId(1); + child.setDaddy(daddy); + Set children = new HashSet(); + children.add(child); + daddy.setChildren(children); + + EntityManager em = getOrCreateEntityManager(); + + em.getTransaction().begin(); + em.persist(daddy); + em.getTransaction().commit(); + em.clear(); + + daddy = em.find(Parent.class, 1); + assertEquals(1, daddy.getNrOfChildren()); + } + + @Override + protected Class[] getAnnotatedClasses() { + return new Class[] { Child.class, Parent.class }; + } +} + + diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/CriteriaCompilingTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/CriteriaCompilingTest.java index 666c189845..1dfddc7fa7 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/CriteriaCompilingTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/CriteriaCompilingTest.java @@ -92,8 +92,6 @@ public class CriteriaCompilingTest extends BaseEntityManagerFunctionalTestCase { } @Test - @RequiresDialect( DB2Dialect.class ) - @FailureExpected( jiraKey = "HHH-6655" ) public void testTrim() { final String expectedResult = "David R. 
Vincent"; EntityManager em = getOrCreateEntityManager(); diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/ManipulationCriteriaTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/ManipulationCriteriaTest.java index 7550ce2396..07690367dc 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/ManipulationCriteriaTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/ManipulationCriteriaTest.java @@ -23,6 +23,8 @@ */ package org.hibernate.jpa.test.criteria; +import static org.junit.Assert.fail; + import javax.persistence.EntityManager; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; @@ -31,14 +33,13 @@ import javax.persistence.criteria.CriteriaUpdate; import javax.persistence.criteria.Root; import javax.persistence.criteria.Subquery; +import org.hibernate.dialect.MySQLDialect; import org.hibernate.jpa.test.metamodel.AbstractMetamodelSpecificTest; import org.hibernate.jpa.test.metamodel.Customer; import org.hibernate.jpa.test.metamodel.Customer_; - +import org.hibernate.testing.SkipForDialect; import org.junit.Test; -import static org.junit.Assert.fail; - /** * @author Steve Ebersole */ @@ -174,6 +175,8 @@ public class ManipulationCriteriaTest extends AbstractMetamodelSpecificTest { } @Test + // MySQL does not allow "delete/update from" and subqueries to use the same table + @SkipForDialect(MySQLDialect.class) public void testDeleteWithUnCorrelatedSubquery() { CriteriaBuilder builder = entityManagerFactory().getCriteriaBuilder(); EntityManager em = getOrCreateEntityManager(); diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/QueryBuilderTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/QueryBuilderTest.java index 3986768b8a..25b0972907 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/QueryBuilderTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/QueryBuilderTest.java @@ -253,8 +253,8 @@ public class QueryBuilderTest extends BaseEntityManagerFunctionalTestCase { cb.function( "substring", String.class, - cb.literal( 1 ), root.get( Customer_.name ), + cb.literal( 1 ), cb.literal( 1 ) ), cb.literal( "a" ) diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/TreatKeywordTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/TreatKeywordTest.java index e6ebc33ea7..b1033b5c16 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/TreatKeywordTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/criteria/TreatKeywordTest.java @@ -76,7 +76,7 @@ public class TreatKeywordTest extends BaseEntityManagerFunctionalTestCase { criteria.where( builder.equal( builder.treat( root, Human.class ).get( "name" ), - 2 + "2" ) ); em.createQuery( criteria ).getResultList(); diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/InterceptorTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/InterceptorTest.java index 748509429f..8b18bbcbf1 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/InterceptorTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/InterceptorTest.java @@ -52,7 +52,7 @@ public class InterceptorTest { public void testConfiguredInterceptor() { Map 
settings = basicSettings(); settings.put( AvailableSettings.INTERCEPTOR, ExceptionInterceptor.class.getName() ); - EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).buildEntityManagerFactory(); + EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).build(); EntityManager em = emf.createEntityManager(); Item i = new Item(); i.setName( "Laptop" ); @@ -78,7 +78,7 @@ public class InterceptorTest { public void testConfiguredSessionInterceptor() { Map settings = basicSettings(); settings.put( AvailableSettings.SESSION_INTERCEPTOR, LocalExceptionInterceptor.class.getName() ); - EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).buildEntityManagerFactory(); + EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).build(); EntityManager em = emf.createEntityManager(); Item i = new Item(); i.setName( "Laptop" ); @@ -104,7 +104,7 @@ public class InterceptorTest { public void testEmptyCreateEntityManagerFactoryAndPropertyUse() { Map settings = basicSettings(); settings.put( AvailableSettings.INTERCEPTOR, ExceptionInterceptor.class.getName() ); - EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).buildEntityManagerFactory(); + EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).build(); EntityManager em = emf.createEntityManager(); Item i = new Item(); i.setName( "Laptop" ); @@ -130,7 +130,7 @@ public class InterceptorTest { public void testOnLoadCallInInterceptor() { Map settings = basicSettings(); settings.put( AvailableSettings.INTERCEPTOR, new ExceptionInterceptor( true ) ); - EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).buildEntityManagerFactory(); + EntityManagerFactory emf = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings ).build(); EntityManager em = emf.createEntityManager(); Item i = new Item(); i.setName( "Laptop" ); diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/PersisterClassProviderTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/PersisterClassProviderTest.java index 57311c6fdf..90a873f94c 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/PersisterClassProviderTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/PersisterClassProviderTest.java @@ -80,7 +80,7 @@ public class PersisterClassProviderTest { EntityManagerFactory entityManagerFactory = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitDescriptorAdapter(), settings - ).buildEntityManagerFactory(); + ).build(); entityManagerFactory.close(); } catch ( PersistenceException e ) { diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/SessionFactoryObserverTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/SessionFactoryObserverTest.java index 1ddfbccf0b..28de0b7ca7 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/SessionFactoryObserverTest.java +++ 
b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/SessionFactoryObserverTest.java @@ -50,7 +50,7 @@ public class SessionFactoryObserverTest { ); try { - final EntityManagerFactory entityManagerFactory = builder.buildEntityManagerFactory(); + final EntityManagerFactory entityManagerFactory = builder.build(); entityManagerFactory.close(); Assert.fail( "GoofyException should have been thrown" ); } diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/id/IdentifierGeneratorStrategyProviderTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/id/IdentifierGeneratorStrategyProviderTest.java index 148ff31f58..6a07a01024 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/id/IdentifierGeneratorStrategyProviderTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/ejb3configuration/id/IdentifierGeneratorStrategyProviderTest.java @@ -51,7 +51,7 @@ public class IdentifierGeneratorStrategyProviderTest { final EntityManagerFactory entityManagerFactory = Bootstrap.getEntityManagerFactoryBuilder( new PersistenceUnitInfoAdapter(), settings - ).buildEntityManagerFactory(); + ).build(); final EntityManager entityManager = entityManagerFactory.createEntityManager(); try { diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/CreditCard.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/CreditCard.java index 3c73df707c..15bde1acd3 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/CreditCard.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/CreditCard.java @@ -94,7 +94,7 @@ public class CreditCard implements java.io.Serializable { number = v; } - @Column(name = "TYPE") + @Column(name = "`TYPE`") public String getType() { return type; } diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/Product.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/Product.java index c4028e98f7..b353911f50 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/Product.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/metamodel/Product.java @@ -135,6 +135,7 @@ public class Product implements java.io.Serializable { this.someBigInteger = someBigInteger; } + @Column( precision = 10, scale = 3) public BigDecimal getSomeBigDecimal() { return someBigDecimal; } diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/JarVisitorTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/JarVisitorTest.java index d0ae475032..6794708dd4 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/JarVisitorTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/JarVisitorTest.java @@ -23,6 +23,11 @@ */ package org.hibernate.jpa.test.packaging; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + import java.io.File; import java.io.IOException; import java.net.URL; @@ -30,12 +35,12 @@ import java.net.URLConnection; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; import java.util.Set; + import javax.persistence.Embeddable; import javax.persistence.Entity; import 
javax.persistence.MappedSuperclass; -import org.junit.Test; - +import org.hibernate.dialect.H2Dialect; import org.hibernate.jpa.packaging.internal.ClassFilter; import org.hibernate.jpa.packaging.internal.Entry; import org.hibernate.jpa.packaging.internal.ExplodedJarVisitor; @@ -47,20 +52,17 @@ import org.hibernate.jpa.packaging.internal.JarProtocolVisitor; import org.hibernate.jpa.packaging.internal.JarVisitor; import org.hibernate.jpa.packaging.internal.JarVisitorFactory; import org.hibernate.jpa.packaging.internal.PackageFilter; -import org.hibernate.jpa.test.pack.explodedpar.Carpet; import org.hibernate.jpa.test.pack.defaultpar.Version; - +import org.hibernate.jpa.test.pack.explodedpar.Carpet; +import org.hibernate.testing.RequiresDialect; import org.hibernate.testing.TestForIssue; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * @author Emmanuel Bernard * @author Hardy Ferentschik */ +@RequiresDialect( H2Dialect.class ) // Nothing dialect-specific -- no need to run in matrix. @SuppressWarnings("unchecked") public class JarVisitorTest extends PackagingTestCase { @Test @@ -226,6 +228,8 @@ public class JarVisitorTest extends PackagingTestCase { @Test @TestForIssue(jiraKey = "HHH-6806") public void testJarVisitorFactory() throws Exception{ + + addPackageToClasspath( buildExplodedPar(), buildDefaultPar() ); //setting URL to accept vfs based protocol URL.setURLStreamHandlerFactory(new URLStreamHandlerFactory() { diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/PackagingTestCase.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/PackagingTestCase.java index 77926e7892..12a82dcd76 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/PackagingTestCase.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/packaging/PackagingTestCase.java @@ -84,8 +84,15 @@ public abstract class PackagingTestCase extends BaseCoreFunctionalTestCase { URL myUrl = originalClassLoader.getResource( PackagingTestCase.class.getName().replace( '.', '/' ) + ".class" ); - // this is assuming that there is a target directory - int index = myUrl.getFile().lastIndexOf( "target" ); + int index; + if (myUrl.getFile().contains( "target" )) { + // assume there's normally a /target + index = myUrl.getFile().lastIndexOf( "target" ); + } else { + // if running in some IDEs, may be in /bin instead + index = myUrl.getFile().lastIndexOf( "bin" ); + } + if ( index == -1 ) { fail( "Unable to setup packaging test" ); } diff --git a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/query/QueryTest.java b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/query/QueryTest.java index bda02fba86..dea77ad3a0 100644 --- a/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/query/QueryTest.java +++ b/hibernate-entitymanager/src/test/java/org/hibernate/jpa/test/query/QueryTest.java @@ -24,6 +24,7 @@ package org.hibernate.jpa.test.query; import java.util.ArrayList; +import java.util.Arrays; import java.util.Date; import java.util.List; import javax.persistence.EntityManager; @@ -123,6 +124,43 @@ public class QueryTest extends BaseEntityManagerFunctionalTestCase { em.close(); } + @Test + @TestForIssue( jiraKey = "HHH-7407" ) + public void testMultipleParameterLists() throws Exception { + final Item item = new Item( "Mouse", "Micro$oft mouse" ); 
final Item item2 = new Item( "Computer", "Dell computer" ); + + EntityManager em = getOrCreateEntityManager(); + em.getTransaction().begin(); + em.persist( item ); + em.persist( item2 ); + assertTrue( em.contains( item ) ); + em.getTransaction().commit(); + + List names = Arrays.asList( item.getName() ); + Query q = em.createQuery( "select item from Item item where item.name in :names or item.name in :names2" ); + q.setParameter( "names", names ); + q.setParameter( "names2", names ); + List result = q.getResultList(); + assertNotNull( result ); + assertEquals( 1, result.size() ); + + List descrs = Arrays.asList( item.getDescr() ); + q = em.createQuery( "select item from Item item where item.name in :names and ( item.descr is null or item.descr in :descrs )" ); + q.setParameter( "names", names ); + q.setParameter( "descrs", descrs ); + result = q.getResultList(); + assertNotNull( result ); + assertEquals( 1, result.size() ); + + em.getTransaction().begin(); + em.remove( em.getReference( Item.class, item.getName() ) ); + em.remove( em.getReference( Item.class, item2.getName() ) ); + em.getTransaction().commit(); + + em.close(); + } + @Test public void testParameterList() throws Exception { final Item item = new Item( "Mouse", "Micro$oft mouse" ); diff --git a/hibernate-entitymanager/src/test/resources/org/hibernate/jpa/test/xml/Qualifier.hbm.xml b/hibernate-entitymanager/src/test/resources/org/hibernate/jpa/test/xml/Qualifier.hbm.xml index eac8842edb..8cb8727530 100644 --- a/hibernate-entitymanager/src/test/resources/org/hibernate/jpa/test/xml/Qualifier.hbm.xml +++ b/hibernate-entitymanager/src/test/resources/org/hibernate/jpa/test/xml/Qualifier.hbm.xml @@ -17,7 +17,7 @@ - + diff --git a/hibernate-envers/hibernate-envers.gradle b/hibernate-envers/hibernate-envers.gradle index 5178f839de..28fa978cac 100644 --- a/hibernate-envers/hibernate-envers.gradle +++ b/hibernate-envers/hibernate-envers.gradle @@ -17,6 +17,16 @@ sourceSets { srcDir generatedJpaMetamodelSrcDir } } + test { + ext.enversDemoJavaDir = file( "src/demo/java" ) + ext.enversDemoResourcesDir = file( "src/demo/resources" ) + java { + srcDir enversDemoJavaDir + } + resources { + srcDir enversDemoResourcesDir + } + } } // Generate JPA2 static metamodel for default revision entities diff --git a/hibernate-envers/src/demo/java/org/hibernate/envers/demo/TestConsole.java b/hibernate-envers/src/demo/java/org/hibernate/envers/demo/TestConsole.java index a1c1ce463d..3dbda4e2d5 100644 --- a/hibernate-envers/src/demo/java/org/hibernate/envers/demo/TestConsole.java +++ b/hibernate-envers/src/demo/java/org/hibernate/envers/demo/TestConsole.java @@ -20,7 +20,6 @@ * Red Hat Author(s): Adam Warski */ package org.hibernate.envers.demo; -import java.io.File; import java.io.PrintStream; import java.util.HashMap; import java.util.HashSet; @@ -451,8 +450,6 @@ public class TestConsole { } public static void main(String[] args) { - String userDbFile = System.getProperty("java.io.tmpdir") + File.separator + "_versions_demo.db"; - Map configurationOverrides = new HashMap(); EntityManagerFactory emf = Persistence.createEntityManagerFactory("ConsolePU", configurationOverrides); EntityManager entityManager = emf.createEntityManager(); @@ -461,7 +458,10 @@ public class TestConsole { System.out.println(""); System.out.println("Welcome to EntityVersions demo!"); - System.out.println("HSQLDB database file location: " + userDbFile); +// If you would like to use HSQLDB, uncomment relevant entries in +// hibernate-envers/src/demo/resources/META-INF/persistence.xml 
descriptor and add required JAR libraries. +// String userDbFile = System.getProperty("java.io.tmpdir") + File.separator + "_versions_demo.db"; +// System.out.println("HSQLDB database file location: " + userDbFile); console.populateTestData(); console.start(); diff --git a/hibernate-envers/src/demo/resources/META-INF/persistence.xml b/hibernate-envers/src/demo/resources/META-INF/persistence.xml index 7540fed948..02a5ae532f 100644 --- a/hibernate-envers/src/demo/resources/META-INF/persistence.xml +++ b/hibernate-envers/src/demo/resources/META-INF/persistence.xml @@ -1,16 +1,22 @@ - org.hibernate.ejb.HibernatePersistence + org.hibernate.jpa.HibernatePersistenceProvider org.hibernate.envers.demo.Address org.hibernate.envers.demo.Person - + + + + + + + + + org.hibernate.envers.test.entities.customtype.UnspecifiedEnumTypeEntity$E1 + + + + + + + org.hibernate.envers.test.entities.customtype.UnspecifiedEnumTypeEntity$E2 + + + + \ No newline at end of file diff --git a/hibernate-envers/src/test/resources/mappings/oneToOne/bidirectional/eagerLoading.hbm.xml b/hibernate-envers/src/test/resources/mappings/oneToOne/bidirectional/eagerLoading.hbm.xml new file mode 100644 index 0000000000..1a0847a95f --- /dev/null +++ b/hibernate-envers/src/test/resources/mappings/oneToOne/bidirectional/eagerLoading.hbm.xml @@ -0,0 +1,45 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/hibernate-infinispan/hibernate-infinispan.gradle b/hibernate-infinispan/hibernate-infinispan.gradle index 6d108b1720..386d2e44d3 100644 --- a/hibernate-infinispan/hibernate-infinispan.gradle +++ b/hibernate-infinispan/hibernate-infinispan.gradle @@ -13,6 +13,7 @@ dependencies { testCompile( libraries.jnp_client ) testCompile( libraries.jnp_server ) testCompile( libraries.rhq ) + testCompile ('mysql:mysql-connector-java:5.1.17') } test { @@ -22,8 +23,20 @@ test { systemProperties['jgroups.udp.enable_bundling'] = false systemProperties['jgroups.bind_addr'] = 'localhost' // Use Infinispan's test JGroups stack that uses TEST_PING - systemProperties['hibernate.cache.infinispan.jgroups_cfg'] = 'stacks/tcp.xml' - // systemProperties['log4j.configuration'] = 'file:/log4j/log4j-infinispan.xml' + systemProperties['hibernate.cache.infinispan.jgroups_cfg'] = '2lc-test-tcp.xml' + // systemProperties['log4j.configuration'] = 'file:/log4j/log4j-infinispan.xml' enabled = true } +task packageTests(type: Jar) { + from sourceSets.test.output + classifier = 'tests' +} + +task sourcesTestJar(type: Jar, dependsOn:classes) { + from sourceSets.test.allSource + classifier = 'test-sources' +} + +artifacts.archives packageTests +artifacts.archives sourcesTestJar diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/InfinispanRegionFactory.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/InfinispanRegionFactory.java index 23d6d3014e..ad3cc71481 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/InfinispanRegionFactory.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/InfinispanRegionFactory.java @@ -1,6 +1,7 @@ package org.hibernate.cache.infinispan; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -9,15 +10,24 @@ import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; -import javax.transaction.TransactionManager; +import java.util.concurrent.TimeUnit; +import 
org.hibernate.cache.infinispan.timestamp.ClusteredTimestampsRegionImpl; +import org.hibernate.cache.infinispan.util.Caches; import org.infinispan.AdvancedCache; -import org.infinispan.Cache; import org.infinispan.commands.module.ModuleCommandFactory; -import org.infinispan.config.Configuration; +import org.infinispan.configuration.cache.CacheMode; +import org.infinispan.configuration.cache.Configuration; +import org.infinispan.configuration.cache.ConfigurationBuilder; import org.infinispan.factories.GlobalComponentRegistry; import org.infinispan.manager.DefaultCacheManager; import org.infinispan.manager.EmbeddedCacheManager; +import org.infinispan.transaction.TransactionMode; +import org.infinispan.transaction.lookup.GenericTransactionManagerLookup; +import org.infinispan.util.concurrent.IsolationLevel; +import org.infinispan.configuration.parsing.ConfigurationBuilderHolder; +import org.infinispan.configuration.parsing.ParserRegistry; +import org.infinispan.util.FileLookupFactory; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; @@ -34,8 +44,6 @@ import org.hibernate.cache.infinispan.query.QueryResultsRegionImpl; import org.hibernate.cache.infinispan.timestamp.TimestampTypeOverrides; import org.hibernate.cache.infinispan.timestamp.TimestampsRegionImpl; import org.hibernate.cache.infinispan.tm.HibernateTransactionManagerLookup; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.CacheAdapterImpl; import org.hibernate.cache.spi.CollectionRegion; import org.hibernate.cache.spi.EntityRegion; import org.hibernate.cache.spi.NaturalIdRegion; @@ -169,6 +177,11 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { */ public static final boolean DEF_USE_SYNCHRONIZATION = true; + /** + * Name of the pending puts cache. 
+ */ + public static final String PENDING_PUTS_CACHE_NAME = "pending-puts"; + private EmbeddedCacheManager manager; private final Map typeOverrides = new HashMap(); @@ -177,16 +190,14 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { private org.infinispan.transaction.lookup.TransactionManagerLookup transactionManagerlookup; - private TransactionManager transactionManager; - private List regionNames = new ArrayList(); /** {@inheritDoc} */ public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { if (log.isDebugEnabled()) log.debug("Building collection cache region [" + regionName + "]"); AdvancedCache cache = getCache(regionName, COLLECTION_KEY, properties); - CacheAdapter cacheAdapter = CacheAdapterImpl.newInstance(cache); - CollectionRegionImpl region = new CollectionRegionImpl(cacheAdapter, regionName, metadata, transactionManager, this); + CollectionRegionImpl region = new CollectionRegionImpl( + cache, regionName, metadata, this); startRegion(region, regionName); return region; } @@ -195,8 +206,8 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { if (log.isDebugEnabled()) log.debug("Building entity cache region [" + regionName + "]"); AdvancedCache cache = getCache(regionName, ENTITY_KEY, properties); - CacheAdapter cacheAdapter = CacheAdapterImpl.newInstance(cache); - EntityRegionImpl region = new EntityRegionImpl(cacheAdapter, regionName, metadata, transactionManager, this); + EntityRegionImpl region = new EntityRegionImpl( + cache, regionName, metadata, this); startRegion(region, regionName); return region; } @@ -204,19 +215,13 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { @Override public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { - if ( log.isDebugEnabled() ) { - log.debug( "Building natural id cache region [" + regionName + "]" ); + if (log.isDebugEnabled()) { + log.debug("Building natural id cache region [" + regionName + "]"); } - AdvancedCache cache = getCache( regionName, NATURAL_ID_KEY, properties ); - CacheAdapter cacheAdapter = CacheAdapterImpl.newInstance( cache ); + AdvancedCache cache = getCache(regionName, NATURAL_ID_KEY, properties); NaturalIdRegionImpl region = new NaturalIdRegionImpl( - cacheAdapter, - regionName, - metadata, - transactionManager, - this - ); - startRegion( region, regionName ); + cache, regionName, metadata, this); + startRegion(region, regionName); return region; } @@ -232,8 +237,8 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { cacheName = regionName; AdvancedCache cache = getCache(cacheName, QUERY_KEY, properties); - CacheAdapter cacheAdapter = CacheAdapterImpl.newInstance(cache); - QueryResultsRegionImpl region = new QueryResultsRegionImpl(cacheAdapter, regionName, properties, transactionManager, this); + QueryResultsRegionImpl region = new QueryResultsRegionImpl( + cache, regionName, this); startRegion(region, regionName); return region; } @@ -245,14 +250,17 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { throws CacheException { if (log.isDebugEnabled()) log.debug("Building timestamps cache region [" + regionName + "]"); AdvancedCache cache = getCache(regionName, TIMESTAMPS_KEY, properties); - CacheAdapter cacheAdapter = 
CacheAdapterImpl.newInstance(cache); - TimestampsRegionImpl region = createTimestampsRegion(cacheAdapter, regionName); + TimestampsRegionImpl region = createTimestampsRegion(cache, regionName); startRegion(region, regionName); return region; } - protected TimestampsRegionImpl createTimestampsRegion(CacheAdapter cacheAdapter, String regionName) { - return new TimestampsRegionImpl(cacheAdapter, regionName, transactionManager, this); + protected TimestampsRegionImpl createTimestampsRegion( + AdvancedCache cache, String regionName) { + if (Caches.isClustered(cache)) + return new ClusteredTimestampsRegionImpl(cache, regionName, this); + else + return new TimestampsRegionImpl(cache, regionName, this); } /** @@ -287,8 +295,6 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { log.debug("Starting Infinispan region factory"); try { transactionManagerlookup = createTransactionManagerLookup( getServiceRegistry() ); - transactionManager = transactionManagerlookup.getTransactionManager(); - manager = createCacheManager(); initGenericDataTypeOverrides(); ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class ); Map settings = configurationService.getSettings(); @@ -298,7 +304,9 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { dissectProperty(prefixLoc, key.toString(), settings); } } + manager = createCacheManager(settings); defineGenericDataTypeCacheConfigurations( settings); + definePendingPutsCache(); } catch (CacheException ce) { throw ce; } catch (Throwable t) { @@ -314,6 +322,22 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { start(); } + private void definePendingPutsCache() { + ConfigurationBuilder builder = new ConfigurationBuilder(); + // A local, lightweight cache for pending puts, which is + // non-transactional and has aggressive expiration settings. + // Locking is still required since the putFromLoad validator + // code uses conditional operations (i.e. putIfAbsent). 
+ builder.clustering().cacheMode(CacheMode.LOCAL) + .transaction().transactionMode(TransactionMode.NON_TRANSACTIONAL) + .expiration().maxIdle(TimeUnit.SECONDS.toMillis(60)) + .storeAsBinary().enabled(false) + .locking().isolationLevel(IsolationLevel.READ_COMMITTED) + .jmxStatistics().disable(); + + manager.defineConfiguration(PENDING_PUTS_CACHE_NAME, builder.build()); + } + protected org.infinispan.transaction.lookup.TransactionManagerLookup createTransactionManagerLookup( ServiceRegistry sr) { return new HibernateTransactionManagerLookup(sr); @@ -332,7 +356,8 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { protected void stopCacheRegions() { log.debug("Clear region references"); - getCacheCommandFactory(manager.getCache()).clearRegions(regionNames); + getCacheCommandFactory(manager.getCache().getAdvancedCache()) + .clearRegions(regionNames); regionNames.clear(); } @@ -355,34 +380,37 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { return Collections.unmodifiableSet(definedConfigurations); } - protected EmbeddedCacheManager createCacheManager() throws CacheException { - try { - ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class ); - String configLoc = configurationService.getSetting( - INFINISPAN_CONFIG_RESOURCE_PROP, - StandardConverters.STRING, DEF_INFINISPAN_CONFIG_RESOURCE - ); - EmbeddedCacheManager manager = new DefaultCacheManager( configLoc, false ); - Boolean globalStats = configurationService.getSetting( - INFINISPAN_GLOBAL_STATISTICS_PROP, - StandardConverters.BOOLEAN, - false - ); - if ( globalStats ) { - manager.getGlobalConfiguration().fluent().globalJmxStatistics(); - } - manager.start(); - return manager; - } - catch ( IOException e ) { - throw new CacheException( "Unable to create default cache manager", e ); - } + protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException { + try { + String configLoc = ConfigurationHelper.getString( + INFINISPAN_CONFIG_RESOURCE_PROP, properties, DEF_INFINISPAN_CONFIG_RESOURCE); + ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader(); + InputStream is = FileLookupFactory.newInstance().lookupFileStrict( + configLoc, ctxClassLoader); + ParserRegistry parserRegistry = new ParserRegistry(ctxClassLoader); + ConfigurationBuilderHolder holder = parserRegistry.parse(is); + + // Override global jmx statistics exposure + String globalStats = extractProperty( + INFINISPAN_GLOBAL_STATISTICS_PROP, properties); + if (globalStats != null) + holder.getGlobalConfigurationBuilder().globalJmxStatistics() + .enabled(Boolean.parseBoolean(globalStats)); + + return createCacheManager(holder); + } catch (IOException e) { + throw new CacheException("Unable to create default cache manager", e); + } + } + + protected EmbeddedCacheManager createCacheManager( + ConfigurationBuilderHolder holder) { + return new DefaultCacheManager(holder, true); } private void startRegion(BaseRegion region, String regionName) { regionNames.add(regionName); - getCacheCommandFactory(region.getCacheAdapter().getCache()) - .addRegion(regionName, region); + getCacheCommandFactory(region.getCache()).addRegion(regionName, region); } public void initGenericDataTypeOverrides() { @@ -443,45 +471,59 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { return cfgOverride; } - private void defineGenericDataTypeCacheConfigurations(Map properties) throws CacheException { + private void defineGenericDataTypeCacheConfigurations(Map 
properties) { String[] defaultGenericDataTypes = new String[]{ENTITY_KEY, COLLECTION_KEY, TIMESTAMPS_KEY, QUERY_KEY}; for (String type : defaultGenericDataTypes) { TypeOverrides override = overrideStatisticsIfPresent(typeOverrides.get(type), properties); String cacheName = override.getCacheName(); - Configuration newCacheCfg = override.createInfinispanConfiguration(); + ConfigurationBuilder builder = new ConfigurationBuilder(); + // Read base configuration + applyConfiguration(cacheName, builder); + // Apply overrides - Configuration cacheConfig = manager.defineConfiguration(cacheName, cacheName, newCacheCfg); + override.applyTo(builder); // Configure transaction manager - cacheConfig = configureTransactionManager(cacheConfig, cacheName, properties); - manager.defineConfiguration(cacheName, cacheName, cacheConfig); + configureTransactionManager(builder, cacheName, properties); + // Define configuration, validate and then apply + Configuration cfg = builder.build(); + override.validateInfinispanConfiguration(cfg); + manager.defineConfiguration(cacheName, cfg); definedConfigurations.add(cacheName); - override.validateInfinispanConfiguration(cacheConfig); } } private AdvancedCache getCache(String regionName, String typeKey, Properties properties) { TypeOverrides regionOverride = typeOverrides.get(regionName); if (!definedConfigurations.contains(regionName)) { - String templateCacheName = null; - Configuration regionCacheCfg = null; + String templateCacheName; + Configuration regionCacheCfg; + ConfigurationBuilder builder = new ConfigurationBuilder(); if (regionOverride != null) { if (log.isDebugEnabled()) log.debug("Cache region specific configuration exists: " + regionOverride); - regionOverride = overrideStatisticsIfPresent(regionOverride, properties); - regionCacheCfg = regionOverride.createInfinispanConfiguration(); String cacheName = regionOverride.getCacheName(); if (cacheName != null) // Region specific override with a given cache name - templateCacheName = cacheName; + templateCacheName = cacheName; else // Region specific override without cache name, so template cache name is generic for data type. - templateCacheName = typeOverrides.get(typeKey).getCacheName(); + templateCacheName = typeOverrides.get(typeKey).getCacheName(); + + // Read template configuration + applyConfiguration(templateCacheName, builder); + + regionOverride = overrideStatisticsIfPresent(regionOverride, properties); + regionOverride.applyTo(builder); + } else { // No region specific overrides, template cache name is generic for data type. 
templateCacheName = typeOverrides.get(typeKey).getCacheName(); - regionCacheCfg = typeOverrides.get(typeKey).createInfinispanConfiguration(); + // Read template configuration + builder.read(manager.getCacheConfiguration(templateCacheName)); + // Apply overrides + typeOverrides.get(typeKey).applyTo(builder); } // Configure transaction manager - regionCacheCfg = configureTransactionManager(regionCacheCfg, templateCacheName, properties); - // Apply overrides - manager.defineConfiguration(regionName, templateCacheName, regionCacheCfg); + configureTransactionManager(builder, templateCacheName, properties); + // Define configuration + manager.defineConfiguration(regionName, builder.build()); definedConfigurations.add(regionName); } AdvancedCache cache = manager.getCache(regionName).getAdvancedCache(); @@ -491,11 +533,20 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { return createCacheWrapper(cache); } - private CacheCommandFactory getCacheCommandFactory(Cache cache) { - GlobalComponentRegistry globalCr = cache.getAdvancedCache() - .getComponentRegistry().getGlobalComponentRegistry(); + private void applyConfiguration(String cacheName, ConfigurationBuilder builder) { + Configuration cfg = manager.getCacheConfiguration(cacheName); + if (cfg != null) + builder.read(cfg); + } + + private CacheCommandFactory getCacheCommandFactory(AdvancedCache cache) { + GlobalComponentRegistry globalCr = cache.getComponentRegistry() + .getGlobalComponentRegistry(); + Map factories = - (Map) globalCr.getComponent("org.infinispan.modules.command.factories"); + (Map) globalCr + .getComponent("org.infinispan.modules.command.factories"); + for (ModuleCommandFactory factory : factories.values()) { if (factory instanceof CacheCommandFactory) return (CacheCommandFactory) factory; @@ -507,28 +558,37 @@ public class InfinispanRegionFactory extends AbstractRegionFactory { } protected AdvancedCache createCacheWrapper(AdvancedCache cache) { - return new ClassLoaderAwareCache(cache, Thread.currentThread().getContextClassLoader()); + if (Caches.isClustered(cache)) + return new ClassLoaderAwareCache(cache, + Thread.currentThread().getContextClassLoader()); + + return cache; } - private Configuration configureTransactionManager(Configuration regionOverrides, String templateCacheName, Map properties) { + private void configureTransactionManager(ConfigurationBuilder builder, + String cacheName, Map properties) { // Get existing configuration to verify whether a tm was configured or not. 
- Configuration templateConfig = manager.defineConfiguration(templateCacheName, new Configuration()); - if (templateConfig.isTransactionalCache()) { - String ispnTmLookupClassName = templateConfig.getTransactionManagerLookupClass(); + Configuration baseCfg = manager.getCacheConfiguration(cacheName); + if (baseCfg != null && baseCfg.transaction().transactionMode().isTransactional()) { + String ispnTmLookupClassName = baseCfg.transaction().transactionManagerLookup().getClass().getName(); String hbTmLookupClassName = org.hibernate.cache.infinispan.tm.HibernateTransactionManagerLookup.class.getName(); - if (ispnTmLookupClassName != null && !ispnTmLookupClassName.equals(hbTmLookupClassName)) { + if (GenericTransactionManagerLookup.class.getName().equals(ispnTmLookupClassName)) { + log.debug("Using default Infinispan transaction manager lookup " + + "instance (GenericTransactionManagerLookup), overriding it " + + "with Hibernate transaction manager lookup"); + builder.transaction().transactionManagerLookup(transactionManagerlookup); + } else if (ispnTmLookupClassName != null && !ispnTmLookupClassName.equals(hbTmLookupClassName)) { log.debug("Infinispan is configured [" + ispnTmLookupClassName + "] with a different transaction manager lookup " + "class than Hibernate [" + hbTmLookupClassName + "]"); } else { - regionOverrides.fluent().transactionManagerLookup( transactionManagerlookup ); + // Infinispan TM lookup class null, so apply Hibernate one directly + builder.transaction().transactionManagerLookup(transactionManagerlookup); } String useSyncProp = extractProperty(INFINISPAN_USE_SYNCHRONIZATION_PROP, properties); boolean useSync = useSyncProp == null ? DEF_USE_SYNCHRONIZATION : Boolean.parseBoolean(useSyncProp); - regionOverrides.fluent().transaction().useSynchronization(useSync); + builder.transaction().useSynchronization(useSync); } - - return regionOverrides; } private TypeOverrides overrideStatisticsIfPresent(TypeOverrides override, Map properties) { diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/JndiInfinispanRegionFactory.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/JndiInfinispanRegionFactory.java index 6e500b59b6..068e4dea1a 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/JndiInfinispanRegionFactory.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/JndiInfinispanRegionFactory.java @@ -21,12 +21,13 @@ */ package org.hibernate.cache.infinispan; -import org.infinispan.manager.EmbeddedCacheManager; +import java.util.Map; import org.hibernate.cache.CacheException; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.config.spi.StandardConverters; import org.hibernate.engine.jndi.spi.JndiService; +import org.infinispan.manager.EmbeddedCacheManager; /** * A {@link org.hibernate.cache.spi.RegionFactory} for Infinispan-backed cache @@ -42,7 +43,7 @@ public class JndiInfinispanRegionFactory extends InfinispanRegionFactory { */ public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager"; @Override - protected EmbeddedCacheManager createCacheManager() throws CacheException { + protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException { String name = getServiceRegistry().getService( ConfigurationService.class ).getSetting( CACHE_MANAGER_RESOURCE_PROP, StandardConverters.STRING diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/TypeOverrides.java 
b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/TypeOverrides.java index d6fcbbb6d4..e4ccc1a002 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/TypeOverrides.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/TypeOverrides.java @@ -25,7 +25,8 @@ import java.util.HashSet; import java.util.Locale; import java.util.Set; -import org.infinispan.config.Configuration; +import org.infinispan.configuration.cache.Configuration; +import org.infinispan.configuration.cache.ConfigurationBuilder; import org.infinispan.eviction.EvictionStrategy; import org.hibernate.cache.CacheException; @@ -118,25 +119,23 @@ public class TypeOverrides { this.isExposeStatistics = isExposeStatistics; } - public Configuration createInfinispanConfiguration() { - Configuration cacheCfg = new Configuration(); + public void applyTo(ConfigurationBuilder builder) { if (overridden.contains("evictionStrategy")) - cacheCfg.fluent().eviction().strategy(evictionStrategy); + builder.eviction().strategy(evictionStrategy); if (overridden.contains("evictionWakeUpInterval")) - cacheCfg.fluent().expiration().wakeUpInterval(evictionWakeUpInterval); + builder.expiration().wakeUpInterval(evictionWakeUpInterval); if (overridden.contains("evictionMaxEntries")) - cacheCfg.fluent().eviction().maxEntries(evictionMaxEntries); + builder.eviction().maxEntries(evictionMaxEntries); if (overridden.contains("expirationLifespan")) - cacheCfg.fluent().expiration().lifespan(expirationLifespan); + builder.expiration().lifespan(expirationLifespan); if (overridden.contains("expirationMaxIdle")) - cacheCfg.fluent().expiration().maxIdle(expirationMaxIdle); + builder.expiration().maxIdle(expirationMaxIdle); if (overridden.contains("isExposeStatistics") && isExposeStatistics) - cacheCfg.fluent().jmxStatistics(); - return cacheCfg; + builder.jmxStatistics().enable(); } - public void validateInfinispanConfiguration(Configuration configuration) throws CacheException { - // no-op + public void validateInfinispanConfiguration(Configuration cfg) throws CacheException { + // no-op, method overridden } @Override diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/PutFromLoadValidator.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/PutFromLoadValidator.java index 2140a37689..9f07d706f4 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/PutFromLoadValidator.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/PutFromLoadValidator.java @@ -23,7 +23,6 @@ */ package org.hibernate.cache.infinispan.access; -import java.lang.ref.WeakReference; import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -38,6 +37,9 @@ import javax.transaction.Transaction; import javax.transaction.TransactionManager; import org.hibernate.cache.CacheException; +import org.hibernate.cache.infinispan.InfinispanRegionFactory; +import org.infinispan.AdvancedCache; +import org.infinispan.manager.EmbeddedCacheManager; /** * Encapsulates logic to allow a {@link TransactionalAccessDelegate} to determine @@ -91,42 +93,19 @@ public class PutFromLoadValidator { */ public static final long NAKED_PUT_INVALIDATION_PERIOD = TimeUnit.SECONDS.toMillis(20); - /** Period (in ms) after which a pending put is placed in the over-age queue */ - private static final long PENDING_PUT_OVERAGE_PERIOD = TimeUnit.SECONDS.toMillis(5); - - /** Period (in ms) before which we stop trying to clean out pending 
puts */ - private static final long PENDING_PUT_RECENT_PERIOD = TimeUnit.SECONDS.toMillis(2); - - /** Period (in ms) after which a pending put is never expected to come in and should be cleaned */ - private static final long MAX_PENDING_PUT_DELAY = TimeUnit.SECONDS.toMillis(2 * 60); - /** * Used to determine whether the owner of a pending put is a thread or a transaction */ private final TransactionManager transactionManager; private final long nakedPutInvalidationPeriod; - private final long pendingPutOveragePeriod; - private final long pendingPutRecentPeriod; - private final long maxPendingPutDelay; /** * Registry of expected, future, isPutValid calls. If a key+owner is registered in this map, it * is not a "naked put" and is allowed to proceed. */ - private final ConcurrentMap pendingPuts = new ConcurrentHashMap(); - /** - * List of pending puts. Used to ensure we don't leak memory via the pendingPuts map - */ - private final List> pendingQueue = new LinkedList>(); - /** - * Separate list of pending puts that haven't been resolved within PENDING_PUT_OVERAGE_PERIOD. - * Used to ensure we don't leak memory via the pendingPuts map. Tracked separately from more - * recent pending puts for efficiency reasons. - */ - private final List> overagePendingQueue = new LinkedList>(); - /** Lock controlling access to pending put queues */ - private final Lock pendingLock = new ReentrantLock(); + private final ConcurrentMap pendingPuts; + private final ConcurrentMap recentRemovals = new ConcurrentHashMap(); /** * List of recent removals. Used to ensure we don't leak memory via the recentRemovals map @@ -148,27 +127,26 @@ public class PutFromLoadValidator { /** * Creates a new PutFromLoadValidator. - * - * @param transactionManager - * transaction manager to use to associate changes with a transaction; may be - * null */ - public PutFromLoadValidator(TransactionManager transactionManager) { - this(transactionManager, NAKED_PUT_INVALIDATION_PERIOD, PENDING_PUT_OVERAGE_PERIOD, - PENDING_PUT_RECENT_PERIOD, MAX_PENDING_PUT_DELAY); + public PutFromLoadValidator(AdvancedCache cache) { + this(cache, NAKED_PUT_INVALIDATION_PERIOD); } /** * Constructor variant for use by unit tests; allows control of various timeouts by the test. 
*/ - protected PutFromLoadValidator(TransactionManager transactionManager, - long nakedPutInvalidationPeriod, long pendingPutOveragePeriod, - long pendingPutRecentPeriod, long maxPendingPutDelay) { - this.transactionManager = transactionManager; + public PutFromLoadValidator(AdvancedCache cache, + long nakedPutInvalidationPeriod) { + this(cache.getCacheManager(), cache.getTransactionManager(), + nakedPutInvalidationPeriod); + } + + public PutFromLoadValidator(EmbeddedCacheManager cacheManager, + TransactionManager tm, long nakedPutInvalidationPeriod) { + this.pendingPuts = cacheManager + .getCache(InfinispanRegionFactory.PENDING_PUTS_CACHE_NAME); + this.transactionManager = tm; this.nakedPutInvalidationPeriod = nakedPutInvalidationPeriod; - this.pendingPutOveragePeriod = pendingPutOveragePeriod; - this.pendingPutRecentPeriod = pendingPutRecentPeriod; - this.maxPendingPutDelay = maxPendingPutDelay; } // ----------------------------------------------------------------- Public @@ -191,10 +169,6 @@ public class PutFromLoadValidator { boolean locked = false; long now = System.currentTimeMillis(); - // Important: Do cleanup before we acquire any locks so we - // don't deadlock with invalidateRegion - cleanOutdatedPendingPuts(now, true); - try { PendingPutMap pending = pendingPuts.get(key); if (pending != null) { @@ -233,9 +207,6 @@ public class PutFromLoadValidator { } } catch (Throwable t) { - - valid = false; - if (locked) { PendingPutMap toRelease = pendingPuts.get(key); if (toRelease != null) { @@ -283,7 +254,6 @@ public class PutFromLoadValidator { * caller should treat as an exception condition) */ public boolean invalidateKey(Object key) { - boolean success = true; // Invalidate any pending puts @@ -330,7 +300,7 @@ public class PutFromLoadValidator { Long cleaned = recentRemovals.get(toClean.key); if (cleaned != null && cleaned.equals(toClean.timestamp)) { cleaned = recentRemovals.remove(toClean.key); - if (cleaned != null && cleaned.equals(toClean.timestamp) == false) { + if (cleaned != null && !cleaned.equals(toClean.timestamp)) { // Oops; removed the wrong timestamp; restore it recentRemovals.putIfAbsent(toClean.key, cleaned); } @@ -405,13 +375,14 @@ public class PutFromLoadValidator { * @param key key that will be used for subsequent cache put */ public void registerPendingPut(Object key) { - PendingPut pendingPut = new PendingPut(key, getOwnerForPut()); + PendingPut pendingPut = new PendingPut(getOwnerForPut()); PendingPutMap pendingForKey = new PendingPutMap(pendingPut); for (;;) { PendingPutMap existing = pendingPuts.putIfAbsent(key, pendingForKey); if (existing != null) { if (existing.acquireLock(10, TimeUnit.SECONDS)) { + try { existing.put(pendingPut); PendingPutMap doublecheck = pendingPuts.putIfAbsent(key, existing); @@ -432,33 +403,10 @@ public class PutFromLoadValidator { break; } } - - // Guard against memory leaks - preventOutdatedPendingPuts(pendingPut); } // -------------------------------------------------------------- Protected - /** Only for use by unit tests; may be removed at any time */ - protected int getPendingPutQueueLength() { - pendingLock.lock(); - try { - return pendingQueue.size(); - } finally { - pendingLock.unlock(); - } - } - - /** Only for use by unit tests; may be removed at any time */ - protected int getOveragePendingPutQueueLength() { - pendingLock.lock(); - try { - return overagePendingQueue.size(); - } finally { - pendingLock.unlock(); - } - } - /** Only for use by unit tests; may be removed at any time */ protected int getRemovalQueueLength() { 
removalsLock.lock(); @@ -484,119 +432,6 @@ public class PutFromLoadValidator { } - private void preventOutdatedPendingPuts(PendingPut pendingPut) { - pendingLock.lock(); - try { - pendingQueue.add(new WeakReference(pendingPut)); - if (pendingQueue.size() > 1) { - cleanOutdatedPendingPuts(pendingPut.timestamp, false); - } - } finally { - pendingLock.unlock(); - } - } - - private void cleanOutdatedPendingPuts(long now, boolean lock) { - - PendingPut toClean = null; - if (lock) { - pendingLock.lock(); - } - try { - // Clean items out of the basic queue - long overaged = now - this.pendingPutOveragePeriod; - long recent = now - this.pendingPutRecentPeriod; - - int pos = 0; - while (pendingQueue.size() > pos) { - WeakReference ref = pendingQueue.get(pos); - PendingPut item = ref.get(); - if (item == null || item.completed) { - pendingQueue.remove(pos); - } else if (item.timestamp < overaged) { - // Potential leak; move to the overaged queued - pendingQueue.remove(pos); - overagePendingQueue.add(ref); - } else if (item.timestamp >= recent) { - // Don't waste time on very recent items - break; - } else if (pos > 2) { - // Don't spend too much time getting nowhere - break; - } else { - // Move on to the next item - pos++; - } - } - - // Process the overage queue until we find an item to clean - // or an incomplete item that hasn't aged out - long mustCleanTime = now - this.maxPendingPutDelay; - - while (overagePendingQueue.size() > 0) { - WeakReference ref = overagePendingQueue.get(0); - PendingPut item = ref.get(); - if (item == null || item.completed) { - overagePendingQueue.remove(0); - } else { - if (item.timestamp < mustCleanTime) { - overagePendingQueue.remove(0); - toClean = item; - } - break; - } - } - } finally { - if (lock) { - pendingLock.unlock(); - } - } - - // We've found a pendingPut that never happened; clean it up - if (toClean != null) { - PendingPutMap map = pendingPuts.get(toClean.key); - if (map != null) { - if (map.acquireLock(100, TimeUnit.MILLISECONDS)) { - try { - PendingPut cleaned = map.remove(toClean.owner); - if (toClean.equals(cleaned) == false) { - if (cleaned != null) { - // Oops. Restore it. - map.put(cleaned); - } - } else if (map.size() == 0) { - pendingPuts.remove(toClean.key, map); - } - } - finally { - map.releaseLock(); - } - } else { - // Something's gone wrong and the lock isn't being released. - // We removed toClean from the queue and need to restore it - // TODO this is pretty dodgy - restorePendingPut(toClean); - } - } - } - - } - - private void restorePendingPut(PendingPut toRestore) { - pendingLock.lock(); - try { - // Give it a new lease on life so it's not out of order. We could - // scan the queue and put toRestore back at the front, but then - // we'll just immediately try removing it again; instead we - // let it cycle through the queue again - toRestore.refresh(); - pendingQueue.add(new WeakReference(toRestore)); - } - finally { - pendingLock.unlock(); - } - } - /** * Lazy-initialization map for PendingPut. Optimized for the expected usual case where only a * single put is pending for a given key. 
@@ -677,19 +512,12 @@ public class PutFromLoadValidator { } private static class PendingPut { - private final Object key; private final Object owner; - private long timestamp = System.currentTimeMillis(); private volatile boolean completed; - private PendingPut(Object key, Object owner) { - this.key = key; + private PendingPut(Object owner) { this.owner = owner; } - - private void refresh() { - timestamp = System.currentTimeMillis(); - } } private static class RecentRemoval { diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/TransactionalAccessDelegate.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/TransactionalAccessDelegate.java index 579072fa8c..36fcfcd46d 100755 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/TransactionalAccessDelegate.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/access/TransactionalAccessDelegate.java @@ -25,13 +25,14 @@ package org.hibernate.cache.infinispan.access; import javax.transaction.Transaction; +import org.hibernate.cache.infinispan.util.Caches; +import org.infinispan.AdvancedCache; +import org.infinispan.context.Flag; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.impl.BaseRegion; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.FlagAdapter; import org.hibernate.cache.spi.access.CollectionRegionAccessStrategy; import org.hibernate.cache.spi.access.EntityRegionAccessStrategy; import org.hibernate.cache.spi.access.SoftLock; @@ -49,20 +50,26 @@ import org.hibernate.cache.spi.access.SoftLock; public class TransactionalAccessDelegate { private static final Log log = LogFactory.getLog(TransactionalAccessDelegate.class); private static final boolean isTrace = log.isTraceEnabled(); - protected final CacheAdapter cacheAdapter; - protected final BaseRegion region; - protected final PutFromLoadValidator putValidator; + private final AdvancedCache cache; + private final BaseRegion region; + private final PutFromLoadValidator putValidator; + private final AdvancedCache writeCache; + private final AdvancedCache putFromLoadCache; public TransactionalAccessDelegate(BaseRegion region, PutFromLoadValidator validator) { this.region = region; - this.cacheAdapter = region.getCacheAdapter(); + this.cache = region.getCache(); this.putValidator = validator; + this.writeCache = Caches.isInvalidationCache(cache) ? + Caches.ignoreReturnValuesCache(cache, Flag.CACHE_MODE_LOCAL) : + Caches.ignoreReturnValuesCache(cache); + this.putFromLoadCache = Caches.ignoreReturnValuesCache(cache); } public Object get(Object key, long txTimestamp) throws CacheException { if (!region.checkValid()) return null; - Object val = cacheAdapter.get(key); + Object val = cache.get(key); if (val == null) putValidator.registerPendingPut(key); return val; @@ -84,7 +91,7 @@ public class TransactionalAccessDelegate { // without https://issues.jboss.org/browse/ISPN-1986, it's impossible to // know whether the put actually occurred. Knowing this is crucial so // that Hibernate can expose accurate statistics. 
- if (minimalPutOverride && cacheAdapter.containsKey(key)) + if (minimalPutOverride && cache.containsKey(key)) return false; if (!putValidator.acquirePutFromLoadLock(key)) { @@ -93,7 +100,7 @@ public class TransactionalAccessDelegate { } try { - cacheAdapter.putForExternalRead(key, value); + putFromLoadCache.putForExternalRead(key, value); } finally { putValidator.releasePutFromLoadLock(key); } @@ -119,11 +126,7 @@ public class TransactionalAccessDelegate { if (!region.checkValid()) return false; - if (cacheAdapter.isClusteredInvalidation()) - cacheAdapter.withFlags(FlagAdapter.CACHE_MODE_LOCAL).put(key, value); - else - cacheAdapter.put(key, value); - + writeCache.put(key, value); return true; } @@ -135,7 +138,7 @@ public class TransactionalAccessDelegate { // We update whether or not the region is valid. Other nodes // may have already restored the region so they need to // be informed of the change. - cacheAdapter.put(key, value); + writeCache.put(key, value); return true; } @@ -151,21 +154,21 @@ public class TransactionalAccessDelegate { // We update whether or not the region is valid. Other nodes // may have already restored the region so they need to // be informed of the change. - cacheAdapter.remove(key); + writeCache.remove(key); } public void removeAll() throws CacheException { if (!putValidator.invalidateRegion()) { throw new CacheException("Failed to invalidate pending putFromLoad calls for region " + region.getName()); } - cacheAdapter.clear(); + cache.clear(); } public void evict(Object key) throws CacheException { if (!putValidator.invalidateKey(key)) { throw new CacheException("Failed to invalidate pending putFromLoad calls for key " + key + " from region " + region.getName()); - } - cacheAdapter.remove(key); + } + writeCache.remove(key); } public void evictAll() throws CacheException { @@ -175,9 +178,10 @@ public class TransactionalAccessDelegate { Transaction tx = region.suspend(); try { region.invalidateRegion(); // Invalidate the local region and then go remote - cacheAdapter.broadcastEvictAll(); + Caches.broadcastEvictAll(cache); } finally { region.resume(tx); } } + } diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/collection/CollectionRegionImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/collection/CollectionRegionImpl.java index 36c308a200..8846430811 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/collection/CollectionRegionImpl.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/collection/CollectionRegionImpl.java @@ -1,16 +1,14 @@ package org.hibernate.cache.infinispan.collection; -import javax.transaction.TransactionManager; - import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.access.PutFromLoadValidator; import org.hibernate.cache.infinispan.impl.BaseTransactionalDataRegion; -import org.hibernate.cache.infinispan.util.CacheAdapter; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.CollectionRegion; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cache.spi.access.CollectionRegionAccessStrategy; +import org.infinispan.AdvancedCache; /** * @author Chris Bredesen @@ -19,9 +17,9 @@ import org.hibernate.cache.spi.access.CollectionRegionAccessStrategy; */ public class CollectionRegionImpl extends BaseTransactionalDataRegion implements CollectionRegion { - public CollectionRegionImpl(CacheAdapter cacheAdapter, String name, 
CacheDataDescription metadata, - TransactionManager transactionManager, RegionFactory factory) { - super(cacheAdapter, name, metadata, transactionManager, factory); + public CollectionRegionImpl(AdvancedCache cache, String name, + CacheDataDescription metadata, RegionFactory factory) { + super(cache, name, metadata, factory); } public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException { @@ -33,6 +31,7 @@ public class CollectionRegionImpl extends BaseTransactionalDataRegion implements } public PutFromLoadValidator getPutFromLoadValidator() { - return new PutFromLoadValidator(transactionManager); + return new PutFromLoadValidator(cache); } + } diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/entity/EntityRegionImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/entity/EntityRegionImpl.java index bed947b9d0..ad66557cfb 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/entity/EntityRegionImpl.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/entity/EntityRegionImpl.java @@ -1,16 +1,14 @@ package org.hibernate.cache.infinispan.entity; -import javax.transaction.TransactionManager; - import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.access.PutFromLoadValidator; import org.hibernate.cache.infinispan.impl.BaseTransactionalDataRegion; -import org.hibernate.cache.infinispan.util.CacheAdapter; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.EntityRegion; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cache.spi.access.EntityRegionAccessStrategy; +import org.infinispan.AdvancedCache; /** * @author Chris Bredesen @@ -19,9 +17,9 @@ import org.hibernate.cache.spi.access.EntityRegionAccessStrategy; */ public class EntityRegionImpl extends BaseTransactionalDataRegion implements EntityRegion { - public EntityRegionImpl(CacheAdapter cacheAdapter, String name, CacheDataDescription metadata, - TransactionManager transactionManager, RegionFactory factory) { - super(cacheAdapter, name, metadata, transactionManager, factory); + public EntityRegionImpl(AdvancedCache cache, String name, + CacheDataDescription metadata, RegionFactory factory) { + super(cache, name, metadata, factory); } public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException { @@ -34,6 +32,7 @@ public class EntityRegionImpl extends BaseTransactionalDataRegion implements Ent } public PutFromLoadValidator getPutFromLoadValidator() { - return new PutFromLoadValidator(transactionManager); + return new PutFromLoadValidator(cache); } + } \ No newline at end of file diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseGeneralDataRegion.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseGeneralDataRegion.java index bfc582bb9c..3c33dbf330 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseGeneralDataRegion.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseGeneralDataRegion.java @@ -3,9 +3,10 @@ package org.hibernate.cache.infinispan.impl; import javax.transaction.TransactionManager; import org.hibernate.cache.CacheException; -import org.hibernate.cache.infinispan.util.CacheAdapter; +import org.hibernate.cache.infinispan.util.Caches; import org.hibernate.cache.spi.GeneralDataRegion; import 
org.hibernate.cache.spi.RegionFactory; +import org.infinispan.AdvancedCache; /** * Support for Infinispan {@link GeneralDataRegion} implementors. @@ -16,24 +17,28 @@ import org.hibernate.cache.spi.RegionFactory; */ public abstract class BaseGeneralDataRegion extends BaseRegion implements GeneralDataRegion { - public BaseGeneralDataRegion(CacheAdapter cacheAdapter, String name, TransactionManager transactionManager, RegionFactory factory) { - super(cacheAdapter, name, transactionManager, factory); + private final AdvancedCache putCache; + + public BaseGeneralDataRegion(AdvancedCache cache, String name, + RegionFactory factory) { + super(cache, name, factory); + this.putCache = Caches.ignoreReturnValuesCache(cache); } public void evict(Object key) throws CacheException { - cacheAdapter.evict(key); + cache.evict(key); } public void evictAll() throws CacheException { - cacheAdapter.clear(); + cache.clear(); } public Object get(Object key) throws CacheException { - return cacheAdapter.get(key); + return cache.get(key); } public void put(Object key, Object value) throws CacheException { - cacheAdapter.put(key, value); + putCache.put(key, value); } } \ No newline at end of file diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseRegion.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseRegion.java index ac57c32597..67126ca078 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseRegion.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseRegion.java @@ -8,13 +8,13 @@ import javax.transaction.SystemException; import javax.transaction.Transaction; import javax.transaction.TransactionManager; +import org.hibernate.cache.infinispan.util.Caches; +import org.infinispan.AdvancedCache; +import org.infinispan.context.Flag; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; import org.hibernate.cache.CacheException; -import org.hibernate.cache.infinispan.util.AddressAdapter; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.FlagAdapter; import org.hibernate.cache.spi.Region; import org.hibernate.cache.spi.RegionFactory; @@ -29,37 +29,38 @@ import org.hibernate.cache.spi.RegionFactory; */ public abstract class BaseRegion implements Region { - private enum InvalidateState { INVALID, CLEARING, VALID }; private static final Log log = LogFactory.getLog(BaseRegion.class); + + private enum InvalidateState { + INVALID, CLEARING, VALID + } + private final String name; - protected final CacheAdapter cacheAdapter; - protected final AddressAdapter address; - protected final TransactionManager transactionManager; - protected final boolean replication; - protected final Object invalidationMutex = new Object(); - protected final AtomicReference invalidateState = new AtomicReference(InvalidateState.VALID); + private final AdvancedCache regionClearCache; + private final TransactionManager tm; + private final Object invalidationMutex = new Object(); + private final AtomicReference invalidateState = + new AtomicReference(InvalidateState.VALID); private final RegionFactory factory; - public BaseRegion(CacheAdapter cacheAdapter, String name, TransactionManager transactionManager, RegionFactory factory) { - this.cacheAdapter = cacheAdapter; + protected final AdvancedCache cache; + + public BaseRegion(AdvancedCache cache, String name, RegionFactory factory) { + this.cache = cache; this.name = name; - 
this.transactionManager = transactionManager; - this.replication = cacheAdapter.isClusteredReplication(); - this.address = this.cacheAdapter.getAddress(); + this.tm = cache.getTransactionManager(); this.factory = factory; + this.regionClearCache = cache.withFlags( + Flag.CACHE_MODE_LOCAL, Flag.ZERO_LOCK_ACQUISITION_TIMEOUT); } public String getName() { return name; } - public CacheAdapter getCacheAdapter() { - return cacheAdapter; - } - public long getElementCountInMemory() { if (checkValid()) - return cacheAdapter.size(); + return cache.size(); return 0; } @@ -92,51 +93,46 @@ public abstract class BaseRegion implements Region { public Map toMap() { if (checkValid()) - return cacheAdapter.toMap(); + return cache; return Collections.EMPTY_MAP; } public void destroy() throws CacheException { try { - cacheAdapter.stop(); + cache.stop(); } finally { - cacheAdapter.removeListener(this); + cache.removeListener(this); } } public boolean contains(Object key) { - if (!checkValid()) - return false; - // Reads are non-blocking in Infinispan, so not sure of the necessity of passing ZERO_LOCK_ACQUISITION_TIMEOUT - return cacheAdapter.withFlags(FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT).containsKey(key); - } - - public AddressAdapter getAddress() { - return address; + return checkValid() && cache.containsKey(key); } public boolean checkValid() { boolean valid = isValid(); if (!valid) { synchronized (invalidationMutex) { - if (invalidateState.compareAndSet(InvalidateState.INVALID, InvalidateState.CLEARING)) { + if (invalidateState.compareAndSet( + InvalidateState.INVALID, InvalidateState.CLEARING)) { Transaction tx = suspend(); try { // Clear region in a separate transaction - cacheAdapter.withinTx(new Callable() { + Caches.withinTx(cache, new Callable() { @Override public Void call() throws Exception { - cacheAdapter.withFlags(FlagAdapter.CACHE_MODE_LOCAL, - FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT).clear(); + regionClearCache.clear(); return null; } }); - invalidateState.compareAndSet(InvalidateState.CLEARING, InvalidateState.VALID); + invalidateState.compareAndSet( + InvalidateState.CLEARING, InvalidateState.VALID); } catch (Exception e) { if (log.isTraceEnabled()) { - log.trace("Could not invalidate region: " + e.getLocalizedMessage()); + log.trace("Could not invalidate region: " + + e.getLocalizedMessage()); } } finally { @@ -150,44 +146,10 @@ public abstract class BaseRegion implements Region { return valid; } - - protected boolean isValid() { return invalidateState.get() == InvalidateState.VALID; } - /** - * Performs a Infinispan get(Fqn, Object) - * - * @param key The key of the item to get - * @param suppressTimeout should any TimeoutException be suppressed? - * @param flagAdapters flags to add to the get invocation - * @return The retrieved object - * @throws CacheException issue managing transaction or talking to cache - */ - protected Object get(Object key, boolean suppressTimeout, FlagAdapter... flagAdapters) throws CacheException { - CacheAdapter localCacheAdapter = cacheAdapter; - if (flagAdapters != null && flagAdapters.length > 0) - localCacheAdapter = cacheAdapter.withFlags(flagAdapters); - - if (suppressTimeout) - return localCacheAdapter.getAllowingTimeout(key); - else - return localCacheAdapter.get(key); - } - - public Object getOwnerForPut() { - Transaction tx = null; - try { - if (transactionManager != null) { - tx = transactionManager.getTransaction(); - } - } catch (SystemException se) { - throw new CacheException("Could not obtain transaction", se); - } - return tx == null ? 
Thread.currentThread() : tx; - } - /** * Tell the TransactionManager to suspend any ongoing transaction. * @@ -197,8 +159,8 @@ public abstract class BaseRegion implements Region { public Transaction suspend() { Transaction tx = null; try { - if (transactionManager != null) { - tx = transactionManager.suspend(); + if (tm != null) { + tx = tm.suspend(); } } catch (SystemException se) { throw new CacheException("Could not suspend transaction", se); @@ -215,7 +177,7 @@ public abstract class BaseRegion implements Region { public void resume(Transaction tx) { try { if (tx != null) - transactionManager.resume(tx); + tm.resume(tx); } catch (Exception e) { throw new CacheException("Could not resume transaction", e); } @@ -227,7 +189,17 @@ public abstract class BaseRegion implements Region { } public TransactionManager getTransactionManager() { - return transactionManager; + return tm; + } + + // Used to satisfy TransactionalDataRegion.isTransactionAware in subclasses + @SuppressWarnings("unused") + public boolean isTransactionAware() { + return tm != null; + } + + public AdvancedCache getCache() { + return cache; } } diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseTransactionalDataRegion.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseTransactionalDataRegion.java index 1151e956e1..880d3eece7 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseTransactionalDataRegion.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/impl/BaseTransactionalDataRegion.java @@ -1,11 +1,9 @@ package org.hibernate.cache.infinispan.impl; -import javax.transaction.TransactionManager; - -import org.hibernate.cache.infinispan.util.CacheAdapter; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.TransactionalDataRegion; +import org.infinispan.AdvancedCache; /** * Support for Inifinispan {@link org.hibernate.cache.spi.TransactionalDataRegion} implementors. 
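As an aside for readers new to the Infinispan API surface used here: the region-clearing flow shown above (suspend the caller's transaction, clear only the local node inside a short separate transaction, then resume) can be condensed into a stand-alone sketch. It leans on the `Caches.withinTx` helper introduced later in this patch and assumes a transactional cache; the class, method and variable names are illustrative only.

    import java.util.concurrent.Callable;
    import javax.transaction.Transaction;
    import javax.transaction.TransactionManager;
    import org.hibernate.cache.infinispan.util.Caches;
    import org.infinispan.AdvancedCache;
    import org.infinispan.context.Flag;

    // Condensed sketch of the local, lock-free region clear performed in checkValid().
    public class LocalRegionClearSketch {
        public static void clearLocally(final AdvancedCache<Object, Object> cache) throws Exception {
            // Never wait on locks and never talk to other nodes while clearing.
            final AdvancedCache<Object, Object> localClear = cache.withFlags(
                    Flag.CACHE_MODE_LOCAL, Flag.ZERO_LOCK_ACQUISITION_TIMEOUT);

            TransactionManager tm = cache.getTransactionManager(); // non-null for a transactional cache
            Transaction tx = tm.suspend(); // park whatever the caller was doing
            try {
                // Run the clear in its own transaction so a failure cannot taint the caller's.
                Caches.withinTx(cache, new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        localClear.clear();
                        return null;
                    }
                });
            }
            finally {
                if (tx != null) {
                    tm.resume(tx);
                }
            }
        }
    }

Storing the flag-decorated view in a field, as `BaseRegion` does with `regionClearCache`, avoids rebuilding it on every invalidation.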
@@ -19,10 +17,9 @@ public abstract class BaseTransactionalDataRegion private final CacheDataDescription metadata; - public BaseTransactionalDataRegion(CacheAdapter cacheAdapter, String name, - CacheDataDescription metadata, TransactionManager transactionManager, - RegionFactory factory) { - super(cacheAdapter, name, transactionManager, factory); + public BaseTransactionalDataRegion(AdvancedCache cache, String name, + CacheDataDescription metadata, RegionFactory factory) { + super(cache, name, factory); this.metadata = metadata; } @@ -30,8 +27,4 @@ public abstract class BaseTransactionalDataRegion return metadata; } - public boolean isTransactionAware() { - return transactionManager != null; - } - } \ No newline at end of file diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/naturalid/NaturalIdRegionImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/naturalid/NaturalIdRegionImpl.java index fbabe8603e..8f6f43fdb9 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/naturalid/NaturalIdRegionImpl.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/naturalid/NaturalIdRegionImpl.java @@ -1,25 +1,27 @@ package org.hibernate.cache.infinispan.naturalid; -import javax.transaction.TransactionManager; - import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.access.PutFromLoadValidator; import org.hibernate.cache.infinispan.impl.BaseTransactionalDataRegion; -import org.hibernate.cache.infinispan.util.CacheAdapter; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.NaturalIdRegion; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cache.spi.access.NaturalIdRegionAccessStrategy; +import org.infinispan.AdvancedCache; /** + * Natural ID cache region + * * @author Strong Liu + * @author Galder Zamarreño */ -public class NaturalIdRegionImpl extends BaseTransactionalDataRegion implements NaturalIdRegion { - public NaturalIdRegionImpl(CacheAdapter cacheAdapter, - String name, CacheDataDescription metadata, - TransactionManager transactionManager, RegionFactory factory) { - super( cacheAdapter, name, metadata, transactionManager, factory ); +public class NaturalIdRegionImpl extends BaseTransactionalDataRegion + implements NaturalIdRegion { + + public NaturalIdRegionImpl(AdvancedCache cache, String name, + CacheDataDescription metadata, RegionFactory factory) { + super(cache, name, metadata, factory); } @Override @@ -33,6 +35,7 @@ public class NaturalIdRegionImpl extends BaseTransactionalDataRegion implements } public PutFromLoadValidator getPutFromLoadValidator() { - return new PutFromLoadValidator(transactionManager); + return new PutFromLoadValidator(cache); } + } diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/query/QueryResultsRegionImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/query/QueryResultsRegionImpl.java index 88c48d3c67..0bc905662b 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/query/QueryResultsRegionImpl.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/query/QueryResultsRegionImpl.java @@ -1,15 +1,14 @@ package org.hibernate.cache.infinispan.query; -import java.util.Properties; import javax.transaction.Transaction; -import javax.transaction.TransactionManager; import org.hibernate.cache.CacheException; import 
org.hibernate.cache.infinispan.impl.BaseTransactionalDataRegion; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.FlagAdapter; +import org.hibernate.cache.infinispan.util.Caches; import org.hibernate.cache.spi.QueryResultsRegion; import org.hibernate.cache.spi.RegionFactory; +import org.infinispan.AdvancedCache; +import org.infinispan.context.Flag; /** * @author Chris Bredesen @@ -17,27 +16,35 @@ import org.hibernate.cache.spi.RegionFactory; * @since 3.5 */ public class QueryResultsRegionImpl extends BaseTransactionalDataRegion implements QueryResultsRegion { - private boolean localOnly; - public QueryResultsRegionImpl(CacheAdapter cacheAdapter, String name, Properties properties, TransactionManager transactionManager, RegionFactory factory) { - super(cacheAdapter, name, null, transactionManager, factory); + private final AdvancedCache evictCache; + private final AdvancedCache putCache; + private final AdvancedCache getCache; + + public QueryResultsRegionImpl(AdvancedCache cache, String name, RegionFactory factory) { + super(cache, name, null, factory); // If Infinispan is using INVALIDATION for query cache, we don't want to propagate changes. // We use the Timestamps cache to manage invalidation - localOnly = cacheAdapter.isClusteredInvalidation(); + boolean localOnly = Caches.isInvalidationCache(cache); + + this.evictCache = localOnly ? Caches.localCache(cache) : cache; + + this.putCache = localOnly ? + Caches.failSilentWriteCache(cache, Flag.CACHE_MODE_LOCAL) : + Caches.failSilentWriteCache(cache); + + this.getCache = Caches.failSilentReadCache(cache); } public void evict(Object key) throws CacheException { - if (localOnly) - cacheAdapter.withFlags(FlagAdapter.CACHE_MODE_LOCAL).remove(key); - else - cacheAdapter.remove(key); + evictCache.remove(key); } public void evictAll() throws CacheException { Transaction tx = suspend(); try { invalidateRegion(); // Invalidate the local region and then go remote - cacheAdapter.broadcastEvictAll(); + Caches.broadcastEvictAll(cache); } finally { resume(tx); } @@ -60,9 +67,9 @@ public class QueryResultsRegionImpl extends BaseTransactionalDataRegion implemen // Add a zero (or low) timeout option so we don't block // waiting for tx's that did a put to commit if (skipCacheStore) - return get(key, true, FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT, FlagAdapter.SKIP_CACHE_STORE); + return getCache.withFlags(Flag.SKIP_CACHE_STORE).get(key); else - return get(key, true, FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT); + return getCache.get(key); } public void put(Object key, Object value) throws CacheException { @@ -82,12 +89,8 @@ public class QueryResultsRegionImpl extends BaseTransactionalDataRegion implemen // any subsequent read will just see the old result with its // out-of-date timestamp; that result will be discarded and the // db query performed again. 
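To make the best-effort semantics in the comment above concrete, here is a rough stand-alone illustration: the write view is decorated so a contended put is simply dropped rather than blocking or throwing, which is acceptable because the stale timestamp will force the query to be re-run anyway. The flag set mirrors the `Caches.failSilentWriteCache` helper added later in this patch; the cache-manager setup and the key/value literals are assumptions made purely for the example.

    import org.infinispan.AdvancedCache;
    import org.infinispan.context.Flag;
    import org.infinispan.manager.DefaultCacheManager;
    import org.infinispan.manager.EmbeddedCacheManager;

    // Best-effort write view for query results: never wait on locks, never report
    // a failure to the caller, and skip loader/remote lookups since the previous
    // value is of no interest.
    public class FailSilentPutSketch {
        public static void main(String[] args) {
            EmbeddedCacheManager cm = new DefaultCacheManager();
            try {
                AdvancedCache<Object, Object> cache =
                        cm.<Object, Object>getCache().getAdvancedCache();

                AdvancedCache<Object, Object> putView = cache.withFlags(
                        Flag.FAIL_SILENTLY,
                        Flag.ZERO_LOCK_ACQUISITION_TIMEOUT,
                        Flag.SKIP_CACHE_LOAD,
                        Flag.SKIP_REMOTE_LOOKUP);

                putView.put("some-query-key", "some-cached-result"); // silently dropped if contended
                System.out.println(cache.get("some-query-key"));
            }
            finally {
                cm.stop();
            }
        }
    }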
- if (localOnly) - cacheAdapter.withFlags(FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT, FlagAdapter.CACHE_MODE_LOCAL) - .putAllowingTimeout(key, value); - else - cacheAdapter.withFlags(FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT) - .putAllowingTimeout(key, value); + putCache.put(key, value); } } + } \ No newline at end of file diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/ClusteredTimestampsRegionImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/ClusteredTimestampsRegionImpl.java new file mode 100644 index 0000000000..5914160a2e --- /dev/null +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/ClusteredTimestampsRegionImpl.java @@ -0,0 +1,152 @@ +/* + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other + * contributors as indicated by the @author tags. All rights reserved. + * See the copyright.txt in the distribution for a full listing of + * individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. + */ + +package org.hibernate.cache.infinispan.timestamp; + +import org.hibernate.cache.CacheException; +import org.hibernate.cache.infinispan.util.Caches; +import org.hibernate.cache.spi.RegionFactory; +import org.infinispan.AdvancedCache; +import org.infinispan.context.Flag; +import org.infinispan.notifications.Listener; +import org.infinispan.notifications.cachelistener.annotation.CacheEntryModified; +import org.infinispan.notifications.cachelistener.annotation.CacheEntryRemoved; +import org.infinispan.notifications.cachelistener.event.CacheEntryModifiedEvent; +import org.infinispan.notifications.cachelistener.event.CacheEntryRemovedEvent; + +import javax.transaction.Transaction; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Timestamp cache region for clustered environments. + * + * @author Galder Zamarreño + * @since 4.1 + */ +@Listener +public class ClusteredTimestampsRegionImpl extends TimestampsRegionImpl { + + /** + * Maintains a local (authoritative) cache of timestamps along with the + * replicated cache held in Infinispan. It listens for changes in the + * cache and updates the local cache accordingly. This approach allows + * timestamp changes to be replicated asynchronously. 
+ */ + private final Map localCache = new ConcurrentHashMap(); + + public ClusteredTimestampsRegionImpl(AdvancedCache cache, + String name, RegionFactory factory) { + super(cache, name, factory); + cache.addListener(this); + populateLocalCache(); + } + + @Override + protected AdvancedCache getTimestampsPutCache(AdvancedCache cache) { + return Caches.asyncWriteCache(cache, Flag.SKIP_LOCKING); + } + + @Override + public Object get(Object key) throws CacheException { + Object value = localCache.get(key); + + // If the region is not valid, skip cache store to avoid going remote to retrieve the query. + // The aim of this is to maintain same logic/semantics as when state transfer was configured. + // TODO: Once https://issues.jboss.org/browse/ISPN-835 has been resolved, revert to state transfer and remove workaround + boolean skipCacheStore = false; + if (!isValid()) + skipCacheStore = true; + + if (value == null && checkValid()) { + if (skipCacheStore) + value = cache.withFlags(Flag.SKIP_CACHE_STORE).get(key); + else + value = cache.get(key); + + if (value != null) + localCache.put(key, value); + } + return value; + } + + @Override + public void evictAll() throws CacheException { + // TODO Is this a valid operation on a timestamps cache? + Transaction tx = suspend(); + try { + invalidateRegion(); // Invalidate the local region and then go remote + Caches.broadcastEvictAll(cache); + } finally { + resume(tx); + } + } + + @Override + public void invalidateRegion() { + super.invalidateRegion(); // Invalidate first + localCache.clear(); + } + + @Override + public void destroy() throws CacheException { + localCache.clear(); + cache.removeListener(this); + super.destroy(); + } + + /** + * Brings all data from the distributed cache into our local cache. + */ + private void populateLocalCache() { + Set children = cache.keySet(); + for (Object key : children) + get(key); + } + + /** + * Monitors cache events and updates the local cache + * + * @param event + */ + @CacheEntryModified + @SuppressWarnings("unused") + public void nodeModified(CacheEntryModifiedEvent event) { + if (!event.isPre()) + localCache.put(event.getKey(), event.getValue()); + } + + /** + * Monitors cache events and updates the local cache + * + * @param event + */ + @CacheEntryRemoved + @SuppressWarnings("unused") + public void nodeRemoved(CacheEntryRemovedEvent event) { + if (event.isPre()) return; + localCache.remove(event.getKey()); + } + +} diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampTypeOverrides.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampTypeOverrides.java index 5b7db8dca4..c965982ca2 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampTypeOverrides.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampTypeOverrides.java @@ -21,8 +21,7 @@ */ package org.hibernate.cache.infinispan.timestamp; -import org.infinispan.config.Configuration; -import org.infinispan.config.Configuration.CacheMode; +import org.infinispan.configuration.cache.Configuration; import org.infinispan.eviction.EvictionStrategy; import org.hibernate.cache.CacheException; @@ -35,15 +34,16 @@ import org.hibernate.cache.infinispan.TypeOverrides; * @since 3.5 */ public class TimestampTypeOverrides extends TypeOverrides { + @Override - public void validateInfinispanConfiguration(Configuration configuration) throws CacheException { - CacheMode cacheMode = 
configuration.getCacheMode(); - if (cacheMode.equals(CacheMode.INVALIDATION_ASYNC) || cacheMode.equals(CacheMode.INVALIDATION_SYNC)) { + public void validateInfinispanConfiguration(Configuration cfg) throws CacheException { + if (cfg.clustering().cacheMode().isInvalidation()) { throw new CacheException("Timestamp cache cannot be configured with invalidation"); } - EvictionStrategy strategy = configuration.getEvictionStrategy(); + EvictionStrategy strategy = cfg.eviction().strategy(); if (!strategy.equals(EvictionStrategy.NONE)) { throw new CacheException("Timestamp cache cannot be configured with eviction"); } } + } diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampsRegionImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampsRegionImpl.java index 5e0ad496c1..fc1d505ed5 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampsRegionImpl.java +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/timestamp/TimestampsRegionImpl.java @@ -4,8 +4,10 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import javax.transaction.Transaction; -import javax.transaction.TransactionManager; +import org.hibernate.cache.infinispan.util.Caches; +import org.infinispan.AdvancedCache; +import org.infinispan.context.Flag; import org.infinispan.notifications.Listener; import org.infinispan.notifications.cachelistener.annotation.CacheEntryModified; import org.infinispan.notifications.cachelistener.annotation.CacheEntryRemoved; @@ -14,8 +16,6 @@ import org.infinispan.notifications.cachelistener.event.CacheEntryRemovedEvent; import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.impl.BaseGeneralDataRegion; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.FlagAdapter; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.TimestampsRegion; @@ -26,110 +26,64 @@ import org.hibernate.cache.spi.TimestampsRegion; * @author Galder Zamarreño * @since 3.5 */ -@Listener public class TimestampsRegionImpl extends BaseGeneralDataRegion implements TimestampsRegion { - private Map localCache = new ConcurrentHashMap(); + private final AdvancedCache removeCache; + private final AdvancedCache timestampsPutCache; - public TimestampsRegionImpl(CacheAdapter cacheAdapter, String name, TransactionManager transactionManager, RegionFactory factory) { - super(cacheAdapter, name, transactionManager, factory); - cacheAdapter.addListener(this); - populateLocalCache(); + public TimestampsRegionImpl(AdvancedCache cache, String name, + RegionFactory factory) { + super(cache, name, factory); + this.removeCache = Caches.ignoreReturnValuesCache(cache); + + // Skip locking when updating timestamps to provide better performance + // under highly concurrent insert scenarios, where update timestamps + // for an entity/collection type are constantly updated, creating + // contention. + // + // The worst it can happen is that an earlier an earlier timestamp + // (i.e. ts=1) will override a later on (i.e. ts=2), so it means that + // in highly concurrent environments, queries might be considered stale + // earlier in time. The upside is that inserts/updates are way faster + // in local set ups. 
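To make the trade-off described above concrete, here is a condensed sketch of the two write views this patch derives for timestamp updates: both ignore the previous value and skip locking, and the clustered variant additionally forces asynchronous replication (mirroring `Caches.ignoreReturnValuesCache` and `Caches.asyncWriteCache`). The class and method names are illustrative only.

    import org.infinispan.AdvancedCache;
    import org.infinispan.context.Flag;

    // Lock-free write views for timestamp updates; the previous value is never
    // needed, so cache-loader and remote lookups are skipped as well.
    public final class TimestampWriteViewsSketch {
        private TimestampWriteViewsSketch() {
        }

        // Local/standalone regions: skip locking to avoid contention on hot timestamp keys.
        public static <K, V> AdvancedCache<K, V> localPutView(AdvancedCache<K, V> cache) {
            return cache.withFlags(
                    Flag.SKIP_CACHE_LOAD, Flag.SKIP_REMOTE_LOOKUP, Flag.SKIP_LOCKING);
        }

        // Clustered regions: additionally replicate the timestamp write asynchronously.
        public static <K, V> AdvancedCache<K, V> clusteredPutView(AdvancedCache<K, V> cache) {
            return cache.withFlags(
                    Flag.SKIP_CACHE_LOAD, Flag.SKIP_REMOTE_LOOKUP,
                    Flag.FORCE_ASYNCHRONOUS, Flag.SKIP_LOCKING);
        }
    }

The cost, as the comment notes, is that an older timestamp can occasionally win a race against a newer one, which only makes queries be treated as stale slightly earlier than strictly necessary.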
+ this.timestampsPutCache = getTimestampsPutCache(cache); + } + + protected AdvancedCache getTimestampsPutCache(AdvancedCache cache) { + return Caches.ignoreReturnValuesCache(cache, Flag.SKIP_LOCKING); } @Override public void evict(Object key) throws CacheException { // TODO Is this a valid operation on a timestamps cache? - cacheAdapter.remove(key); + removeCache.remove(key); } public void evictAll() throws CacheException { // TODO Is this a valid operation on a timestamps cache? Transaction tx = suspend(); try { - invalidateRegion(); // Invalidate the local region and then go remote - cacheAdapter.broadcastEvictAll(); + invalidateRegion(); // Invalidate the local region } finally { resume(tx); } } public Object get(Object key) throws CacheException { - Object value = localCache.get(key); + if (checkValid()) + return cache.get(key); - // If the region is not valid, skip cache store to avoid going remote to retrieve the query. - // The aim of this is to maintain same logic/semantics as when state transfer was configured. - // TODO: Once https://issues.jboss.org/browse/ISPN-835 has been resolved, revert to state transfer and remove workaround - boolean skipCacheStore = false; - if (!isValid()) - skipCacheStore = true; - - if (value == null && checkValid()) { - if (skipCacheStore) - value = get(key, false, FlagAdapter.SKIP_CACHE_STORE); - else - value = get(key, false); - - if (value != null) - localCache.put(key, value); - } - return value; + return null; } public void put(final Object key, final Object value) throws CacheException { try { // We ensure ASYNC semantics (JBCACHE-1175) and make sure previous // value is not loaded from cache store cos it's not needed. - cacheAdapter.withFlags(FlagAdapter.FORCE_ASYNCHRONOUS).put(key, value); + timestampsPutCache.put(key, value); } catch (Exception e) { throw new CacheException(e); } } - @Override - public void destroy() throws CacheException { - localCache.clear(); - cacheAdapter.removeListener(this); - super.destroy(); - } - - /** - * Monitors cache events and updates the local cache - * - * @param event - */ - @CacheEntryModified - @SuppressWarnings("unused") - public void nodeModified(CacheEntryModifiedEvent event) { - if (!event.isPre()) - localCache.put(event.getKey(), event.getValue()); - } - - /** - * Monitors cache events and updates the local cache - * - * @param event - */ - @CacheEntryRemoved - @SuppressWarnings("unused") - public void nodeRemoved(CacheEntryRemovedEvent event) { - if (event.isPre()) return; - localCache.remove(event.getKey()); - } - - @Override - public void invalidateRegion() { - super.invalidateRegion(); // Invalidate first - localCache.clear(); - } - - /** - * Brings all data from the distributed cache into our local cache. - */ - private void populateLocalCache() { - Set children = cacheAdapter.keySet(); - for (Object key : children) - get(key); - } - } \ No newline at end of file diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/AddressAdapter.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/AddressAdapter.java deleted file mode 100644 index 767ea1c459..0000000000 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/AddressAdapter.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * JBoss, Home of Professional Open Source. - * Copyright 2009, Red Hat, Inc. and/or its affiliates, and - * individual contributors as indicated by the @author tags. 
See the - * copyright.txt file in the distribution for a full listing of - * individual contributors. - * - * This is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as - * published by the Free Software Foundation; either version 2.1 of - * the License, or (at your option) any later version. - * - * This software is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this software; if not, write to the Free - * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA - * 02110-1301 USA, or see the FSF site: http://www.fsf.org. - */ -package org.hibernate.cache.infinispan.util; - - -/** - * AddressAdapter. - * - * @author Galder Zamarreño - * @since 3.5 - */ -public interface AddressAdapter { -} diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/AddressAdapterImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/AddressAdapterImpl.java deleted file mode 100644 index 03a36393c0..0000000000 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/AddressAdapterImpl.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * JBoss, Home of Professional Open Source. - * Copyright 2009, Red Hat, Inc. and/or its affiliates, and - * individual contributors as indicated by the @author tags. See the - * copyright.txt file in the distribution for a full listing of - * individual contributors. - * - * This is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as - * published by the Free Software Foundation; either version 2.1 of - * the License, or (at your option) any later version. - * - * This software is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this software; if not, write to the Free - * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA - * 02110-1301 USA, or see the FSF site: http://www.fsf.org. - */ -package org.hibernate.cache.infinispan.util; - -import java.io.Externalizable; -import java.io.IOException; -import java.io.ObjectInput; -import java.io.ObjectOutput; -import java.util.ArrayList; -import java.util.List; - -import org.infinispan.remoting.transport.Address; - -/** - * AddressAdapterImpl. - * - * @author Galder Zamarreño - * @since 3.5 - */ -public class AddressAdapterImpl implements AddressAdapter, Externalizable { - - private Address address; - - // Required by Java Externalizable - public AddressAdapterImpl() { - } - - public AddressAdapterImpl(Address address) { - this.address = address; - } - - static AddressAdapter newInstance(Address address) { - return new AddressAdapterImpl(address); - } - - public static List toAddressAdapter(List


ispnAddresses) { - List addresses = new ArrayList(ispnAddresses.size()); - for (Address address : ispnAddresses) { - addresses.add(AddressAdapterImpl.newInstance(address)); - } - return addresses; - } - - public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { - address = (Address) in.readObject(); - } - - public void writeExternal(ObjectOutput out) throws IOException { - out.writeObject(address); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) - return true; - if (!(obj instanceof AddressAdapterImpl)) - return false; - AddressAdapterImpl other = (AddressAdapterImpl) obj; - return other.address.equals(address); - } - - @Override - public int hashCode() { - int result = 17; - result = 31 * result + address.hashCode(); - return result; - } -} diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheAdapter.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheAdapter.java deleted file mode 100644 index 3d9b0f2203..0000000000 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheAdapter.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * JBoss, Home of Professional Open Source. - * Copyright 2009, Red Hat, Inc. and/or its affiliates, and - * individual contributors as indicated by the @author tags. See the - * copyright.txt file in the distribution for a full listing of - * individual contributors. - * - * This is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as - * published by the Free Software Foundation; either version 2.1 of - * the License, or (at your option) any later version. - * - * This software is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this software; if not, write to the Free - * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA - * 02110-1301 USA, or see the FSF site: http://www.fsf.org. - */ -package org.hibernate.cache.infinispan.util; - -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Callable; - -import org.infinispan.Cache; -import org.infinispan.config.Configuration; -import org.infinispan.util.concurrent.TimeoutException; - -import org.hibernate.cache.CacheException; - -/** - * Infinispan cache abstraction. - * - * @author Galder Zamarreño - * @since 3.5 - */ -public interface CacheAdapter { - - /** - * Is this cache participating in a cluster with invalidation? - * - * @return true if the cache is configured for synchronous/asynchronous invalidation; false otherwise. - */ - boolean isClusteredInvalidation(); - - /** - * Is this cache participating in a cluster with replication? - * - * @return true if the cache is configured for synchronous/asynchronous invalidation; false otherwise. - */ - boolean isClusteredReplication(); - - /** - * Is this cache configured for synchronous communication? - * - * @return true if the cache is configured for synchronous communication; false otherwise. - */ - boolean isSynchronous(); - - /** - * Set of keys of this cache. - * - * @return Set containing keys stored in this cache. - */ - Set keySet(); - - /** - * A builder-style method that adds flags to any cache API call. 
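The interface being removed here is the heart of the change: the builder-style `withFlags(FlagAdapter...)` indirection is unnecessary because `org.infinispan.AdvancedCache` exposes the same builder directly. A minimal before/after sketch (the wrapper method and parameter names are illustrative):

    import org.infinispan.AdvancedCache;
    import org.infinispan.context.Flag;

    public class WithFlagsMigrationSketch {
        public static void putLocally(AdvancedCache<Object, Object> cache,
                                      Object key, Object value) {
            // Before this patch, through the removed abstraction:
            //   cacheAdapter.withFlags(FlagAdapter.CACHE_MODE_LOCAL).put(key, value);
            // After it, the native Infinispan flags are applied directly:
            cache.withFlags(Flag.CACHE_MODE_LOCAL).put(key, value);
        }
    }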
- * - * @param flagAdapters a set of flags to apply. See the {@link FlagAdapter} documentation. - * @return a cache on which a real operation is to be invoked. - */ - CacheAdapter withFlags(FlagAdapter... flagAdapters); - - /** - * Method to check whether a certain key exists in this cache. - * - * @param key key to look up. - * @return true if key is present, false otherwise. - */ - boolean containsKey(Object key); - - /** - * Performs an get(Object) on the cache, wrapping any exception in a {@link CacheException}. - * - * @param key key to retrieve - * @throws CacheException - */ - Object get(Object key) throws CacheException; - - /** - * Performs an get(Object) on the cache ignoring any {@link TimeoutException} - * and wrapping any other exception in a {@link CacheException}. - * - * @param key key to retrieve - * @throws CacheException - */ - Object getAllowingTimeout(Object key) throws CacheException; - - /** - * Performs a put(Object, Object) on the cache, - * wrapping any exception in a {@link CacheException}. - * - * @param key key whose value will be modified - * @param value data to store in the cache entry - * @throws CacheException - */ - void put(Object key, Object value) throws CacheException; - - /** - * Performs a put(Object, Object) on the cache ignoring - * any {@link TimeoutException} and wrapping any exception in a - * {@link CacheException}. - * - * @param key key whose value will be modified - * @param value data to store in the cache entry - * @throws CacheException - */ - void putAllowingTimeout(Object key, Object value) throws CacheException; - - /** - * See {@link Cache#putForExternalRead(Object, Object)} for detailed documentation. - * - * @param key key with which the specified value is to be associated. - * @param value value to be associated with the specified key. - * @throws CacheException - */ - void putForExternalRead(Object key, Object value) throws CacheException; - - /** - * Performs a remove(Object), wrapping any exception in - * a {@link CacheException}. - * - * @param key key to be removed - * @throws CacheException - */ - void remove(Object key) throws CacheException; - - /** - * Evict the given key from memory. - * - * @param key to evict. - */ - void evict(Object key) throws CacheException; - - /** - * Clear the cache. - * - * @throws CacheException - */ - void clear() throws CacheException; - - /** - * Stops the cache. - */ - void stop(); - - /** - * Add listener to this cache. - * - * @param listener to be added to cache. - */ - void addListener(Object listener); - - /** - * Get local cluster address. - * - * @return Address representing local address. - */ - AddressAdapter getAddress(); - - /** - * Get cluster members. - * - * @return List of cluster member Address instances - */ - List getMembers(); - - /** - * Size of cache. - * - * @return number of cache entries. - */ - int size(); - - /** - * This method returns a Map view of the cache. - * - * @return Map view of cache. - */ - Map toMap(); - - /** - * Remove listener from cache instance. - * - * @param listener to be removed. - */ - void removeListener(Object listener); - - /** - * Get cache configuration. - * - * @return Configuration instance associated with this cache. 
- */ - Configuration getConfiguration(); - - /** - * TODO - */ - void broadcastEvictAll(); - - /** - * TODO - * - * @param c - * @param - * @return - */ - T withinTx(Callable c) throws Exception; - - Cache getCache(); - -} diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheAdapterImpl.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheAdapterImpl.java deleted file mode 100644 index 3bf08abae0..0000000000 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheAdapterImpl.java +++ /dev/null @@ -1,255 +0,0 @@ -/* - * JBoss, Home of Professional Open Source. - * Copyright 2009, Red Hat, Inc. and/or its affiliates, and - * individual contributors as indicated by the @author tags. See the - * copyright.txt file in the distribution for a full listing of - * individual contributors. - * - * This is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as - * published by the Free Software Foundation; either version 2.1 of - * the License, or (at your option) any later version. - * - * This software is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this software; if not, write to the Free - * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA - * 02110-1301 USA, or see the FSF site: http://www.fsf.org. - */ -package org.hibernate.cache.infinispan.util; - -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Callable; - -import org.infinispan.AdvancedCache; -import org.infinispan.Cache; -import org.infinispan.config.Configuration; -import org.infinispan.context.Flag; -import org.infinispan.remoting.rpc.RpcManager; -import org.infinispan.util.concurrent.TimeoutException; -import org.infinispan.util.logging.Log; -import org.infinispan.util.logging.LogFactory; - -import org.hibernate.cache.CacheException; - -/** - * CacheAdapterImpl. - * - * @author Galder Zamarreño - * @since 3.5 - */ -public class CacheAdapterImpl implements CacheAdapter { - private static final Log log = LogFactory.getLog(CacheAdapterImpl.class); - - private final AdvancedCache cache; - private final CacheCommandInitializer cacheCmdInitializer; - private final boolean isSync; - - private CacheAdapterImpl(AdvancedCache cache) { - this.cache = cache; - this.cacheCmdInitializer = cache.getComponentRegistry() - .getComponent(CacheCommandInitializer.class); - this.isSync = isSynchronous(cache.getConfiguration().getCacheMode()); - } - - public static CacheAdapter newInstance(AdvancedCache cache) { - return new CacheAdapterImpl(cache); - } - - public boolean isClusteredInvalidation() { - return isClusteredInvalidation(cache.getConfiguration().getCacheMode()); - } - - public boolean isClusteredReplication() { - return isClusteredReplication(cache.getConfiguration().getCacheMode()); - } - - public boolean isSynchronous() { - return isSync; - } - - public Set keySet() { - return cache.keySet(); - } - - public CacheAdapter withFlags(FlagAdapter... 
flagAdapters) { - Flag[] flags = FlagAdapter.toFlags(flagAdapters); - return newInstance(cache.withFlags(flags)); - } - - public Object get(Object key) throws CacheException { - try { - return cache.get(key); - } catch (Exception e) { - throw new CacheException(e); - } - } - - public Object getAllowingTimeout(Object key) throws CacheException { - try { - return getFailSilentCache().get(key); - } catch (TimeoutException ignored) { - // ignore it - return null; - } catch (Exception e) { - throw new CacheException(e); - } - } - - public void put(Object key, Object value) throws CacheException { - try { - // No previous value interest, so apply flags that avoid remote lookups. - getSkipRemoteGetLoadCache().put(key, value); - } catch (Exception e) { - throw new CacheException(e); - } - } - - public void putAllowingTimeout(Object key, Object value) throws CacheException { - try { - // No previous value interest, so apply flags that avoid remote lookups. - getFailSilentCacheSkipRemotes().put(key, value); - } catch (TimeoutException allowed) { - // ignore it - } catch (Exception e) { - throw new CacheException(e); - } - } - - public void putForExternalRead(Object key, Object value) throws CacheException { - try { - // No previous value interest, so apply flags that avoid remote lookups. - getFailSilentCacheSkipRemotes().putForExternalRead(key, value); - } catch (Exception e) { - throw new CacheException(e); - } - } - - public void remove(Object key) throws CacheException { - try { - // No previous value interest, so apply flags that avoid remote lookups. - getSkipRemoteGetLoadCache().remove(key); - } catch (Exception e) { - throw new CacheException(e); - } - } - - public void evict(Object key) throws CacheException { - try { - cache.evict(key); - } catch (Exception e) { - throw new CacheException(e); - } - } - - public void clear() throws CacheException { - try { - cache.clear(); - } catch (Exception e) { - throw new CacheException(e); - } - } - - public void stop() { - if (log.isTraceEnabled()) - log.trace("Stop " + cache); - cache.stop(); - } - - private static boolean isClusteredInvalidation(Configuration.CacheMode cacheMode) { - return cacheMode == Configuration.CacheMode.INVALIDATION_ASYNC - || cacheMode == Configuration.CacheMode.INVALIDATION_SYNC; - } - - private static boolean isClusteredReplication(Configuration.CacheMode cacheMode) { - return cacheMode == Configuration.CacheMode.REPL_ASYNC - || cacheMode == Configuration.CacheMode.REPL_SYNC; - } - - private static boolean isSynchronous(Configuration.CacheMode cacheMode) { - return cacheMode == Configuration.CacheMode.REPL_SYNC - || cacheMode == Configuration.CacheMode.INVALIDATION_SYNC - || cacheMode == Configuration.CacheMode.DIST_SYNC; - } - - public void addListener(Object listener) { - cache.addListener(listener); - } - - public AddressAdapter getAddress() { - RpcManager rpc = cache.getRpcManager(); - if (rpc != null) { - return AddressAdapterImpl.newInstance(rpc.getTransport().getAddress()); - } - return null; - } - - public List getMembers() { - RpcManager rpc = cache.getRpcManager(); - if (rpc != null) { - return AddressAdapterImpl.toAddressAdapter(rpc.getTransport().getMembers()); - } - return null; - } - - public int size() { - return cache.size(); - } - - public Map toMap() { - return cache; - } - - public void removeListener(Object listener) { - cache.removeListener(listener); - } - - public boolean containsKey(Object key) { - return cache.containsKey(key); - } - - public Configuration getConfiguration() { - return 
cache.getConfiguration(); - } - - @Override - public void broadcastEvictAll() { - RpcManager rpcManager = cache.getRpcManager(); - if (rpcManager != null) { - // Only broadcast evict all if it's clustered - EvictAllCommand cmd = cacheCmdInitializer.buildEvictAllCommand(cache.getName()); - rpcManager.broadcastRpcCommand(cmd, isSync); - } - } - - @Override - public T withinTx(Callable c) throws Exception { - return CacheHelper.withinTx(cache.getTransactionManager(), c); - } - - @Override - public Cache getCache() { - return cache; - } - - private Cache getFailSilentCache() { - return cache.withFlags(Flag.FAIL_SILENTLY); - } - - private Cache getSkipRemoteGetLoadCache() { - return cache.withFlags( - Flag.SKIP_CACHE_LOAD, Flag.SKIP_REMOTE_LOOKUP); - } - - private Cache getFailSilentCacheSkipRemotes() { - return cache.withFlags( - Flag.FAIL_SILENTLY, Flag.SKIP_CACHE_LOAD, Flag.SKIP_REMOTE_LOOKUP); - } - -} diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/Caches.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/Caches.java new file mode 100644 index 0000000000..0cef6bdd19 --- /dev/null +++ b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/Caches.java @@ -0,0 +1,141 @@ +/* + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other + * contributors as indicated by the @author tags. All rights reserved. + * See the copyright.txt in the distribution for a full listing of + * individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. + */ + +package org.hibernate.cache.infinispan.util; + +import org.infinispan.AdvancedCache; +import org.infinispan.context.Flag; +import org.infinispan.remoting.rpc.RpcManager; + +import javax.transaction.Status; +import javax.transaction.TransactionManager; +import java.util.concurrent.Callable; + +/** + * Helper for dealing with Infinispan cache instances. + * + * @author Galder Zamarreño + * @since 4.1 + */ +public class Caches { + + private Caches() { + // Suppresses default constructor, ensuring non-instantiability. 
+ } + + public static T withinTx(AdvancedCache cache, + Callable c) throws Exception { + // Retrieve transaction manager + return withinTx(cache.getTransactionManager(), c); + } + + public static T withinTx(TransactionManager tm, + Callable c) throws Exception { + tm.begin(); + try { + return c.call(); + } catch (Exception e) { + tm.setRollbackOnly(); + throw e; + } finally { + if (tm.getStatus() == Status.STATUS_ACTIVE) tm.commit(); + else tm.rollback(); + } + } + + public static AdvancedCache localCache(AdvancedCache cache) { + return cache.withFlags(Flag.CACHE_MODE_LOCAL); + } + + public static AdvancedCache ignoreReturnValuesCache(AdvancedCache cache) { + return cache.withFlags(Flag.SKIP_CACHE_LOAD, Flag.SKIP_REMOTE_LOOKUP); + } + + public static AdvancedCache ignoreReturnValuesCache( + AdvancedCache cache, Flag extraFlag) { + return cache.withFlags( + Flag.SKIP_CACHE_LOAD, Flag.SKIP_REMOTE_LOOKUP, extraFlag); + } + + public static AdvancedCache asyncWriteCache(AdvancedCache cache, + Flag extraFlag) { + return cache.withFlags( + Flag.SKIP_CACHE_LOAD, + Flag.SKIP_REMOTE_LOOKUP, + Flag.FORCE_ASYNCHRONOUS, + extraFlag); + } + + public static AdvancedCache failSilentWriteCache(AdvancedCache cache) { + return cache.withFlags( + Flag.FAIL_SILENTLY, + Flag.ZERO_LOCK_ACQUISITION_TIMEOUT, + Flag.SKIP_CACHE_LOAD, + Flag.SKIP_REMOTE_LOOKUP); + } + + public static AdvancedCache failSilentWriteCache(AdvancedCache cache, + Flag extraFlag) { + return cache.withFlags( + Flag.FAIL_SILENTLY, + Flag.ZERO_LOCK_ACQUISITION_TIMEOUT, + Flag.SKIP_CACHE_LOAD, + Flag.SKIP_REMOTE_LOOKUP, + extraFlag); + } + + public static AdvancedCache failSilentReadCache(AdvancedCache cache) { + return cache.withFlags( + Flag.FAIL_SILENTLY, + Flag.ZERO_LOCK_ACQUISITION_TIMEOUT); + } + + public static void broadcastEvictAll(AdvancedCache cache) { + RpcManager rpcManager = cache.getRpcManager(); + if (rpcManager != null) { + // Only broadcast evict all if it's clustered + CacheCommandInitializer factory = cache.getComponentRegistry() + .getComponent(CacheCommandInitializer.class); + boolean isSync = isSynchronousCache(cache); + + EvictAllCommand cmd = factory.buildEvictAllCommand(cache.getName()); + rpcManager.broadcastRpcCommand(cmd, isSync); + } + } + + public static boolean isInvalidationCache(AdvancedCache cache) { + return cache.getCacheConfiguration() + .clustering().cacheMode().isInvalidation(); + } + + public static boolean isSynchronousCache(AdvancedCache cache) { + return cache.getCacheConfiguration() + .clustering().cacheMode().isSynchronous(); + } + + public static boolean isClustered(AdvancedCache cache) { + return cache.getCacheConfiguration() + .clustering().cacheMode().isClustered(); + } + +} diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/FlagAdapter.java b/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/FlagAdapter.java deleted file mode 100644 index 77325dfc0d..0000000000 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/FlagAdapter.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * JBoss, Home of Professional Open Source. - * Copyright 2009, Red Hat, Inc. and/or its affiliates, and - * individual contributors as indicated by the @author tags. See the - * copyright.txt file in the distribution for a full listing of - * individual contributors. 
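Putting the new `Caches` helpers together: a region can inspect the cache's clustering mode once and pre-compute the decorated views it needs, as `QueryResultsRegionImpl` above does. A compressed sketch under that assumption (field and class names are illustrative, and raw `AdvancedCache` references are used only to keep the sketch short):

    import org.hibernate.cache.infinispan.util.Caches;
    import org.infinispan.AdvancedCache;
    import org.infinispan.context.Flag;

    // Pre-compute decorated views once, based on how the cache is clustered.
    public class QueryRegionViewsSketch {
        private final AdvancedCache evictView;
        private final AdvancedCache putView;

        public QueryRegionViewsSketch(AdvancedCache cache) {
            // With INVALIDATION, query results must not be propagated to other nodes;
            // the timestamps cache handles cross-node invalidation instead.
            boolean localOnly = Caches.isInvalidationCache(cache);
            this.evictView = localOnly ? Caches.localCache(cache) : cache;
            this.putView = localOnly
                    ? Caches.failSilentWriteCache(cache, Flag.CACHE_MODE_LOCAL)
                    : Caches.failSilentWriteCache(cache);
        }

        public void evict(Object key) {
            evictView.remove(key);
        }

        public void put(Object key, Object value) {
            putView.put(key, value);
        }
    }

Pre-computing the views in the constructor keeps the hot put/evict paths free of per-call flag handling.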
- * - * This is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as - * published by the Free Software Foundation; either version 2.1 of - * the License, or (at your option) any later version. - * - * This software is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this software; if not, write to the Free - * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA - * 02110-1301 USA, or see the FSF site: http://www.fsf.org. - */ -package org.hibernate.cache.infinispan.util; - -import org.infinispan.context.Flag; - -import org.hibernate.cache.CacheException; - -/** - * FlagAdapter. - * - * @author Galder Zamarreño - * @since 3.5 - */ -public enum FlagAdapter { - ZERO_LOCK_ACQUISITION_TIMEOUT, - CACHE_MODE_LOCAL, - FORCE_ASYNCHRONOUS, - FORCE_SYNCHRONOUS, - SKIP_CACHE_STORE, - SKIP_CACHE_LOAD; - - Flag toFlag() { - switch(this) { - case ZERO_LOCK_ACQUISITION_TIMEOUT: - return Flag.ZERO_LOCK_ACQUISITION_TIMEOUT; - case CACHE_MODE_LOCAL: - return Flag.CACHE_MODE_LOCAL; - case FORCE_ASYNCHRONOUS: - return Flag.FORCE_ASYNCHRONOUS; - case FORCE_SYNCHRONOUS: - return Flag.FORCE_SYNCHRONOUS; - case SKIP_CACHE_STORE: - return Flag.SKIP_CACHE_STORE; - case SKIP_CACHE_LOAD: - return Flag.SKIP_CACHE_LOAD; - default: - throw new CacheException("Unmatched Infinispan flag " + this); - } - } - - static Flag[] toFlags(FlagAdapter[] adapters) { - Flag[] flags = new Flag[adapters.length]; - for (int i = 0; i < adapters.length; i++) { - flags[i] = adapters[i].toFlag(); - } - return flags; - } -} diff --git a/hibernate-infinispan/src/main/resources/org/hibernate/cache/infinispan/builder/infinispan-configs.xml b/hibernate-infinispan/src/main/resources/org/hibernate/cache/infinispan/builder/infinispan-configs.xml index 0f1634b167..fcc49adceb 100644 --- a/hibernate-infinispan/src/main/resources/org/hibernate/cache/infinispan/builder/infinispan-configs.xml +++ b/hibernate-infinispan/src/main/resources/org/hibernate/cache/infinispan/builder/infinispan-configs.xml @@ -12,7 +12,7 @@ + value="${hibernate.cache.infinispan.jgroups_cfg:jgroups-tcp.xml}"/> diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractGeneralDataRegionTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractGeneralDataRegionTestCase.java index d7caa4459a..3117521feb 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractGeneralDataRegionTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractGeneralDataRegionTestCase.java @@ -25,13 +25,13 @@ package org.hibernate.test.cache.infinispan; import java.util.Set; +import org.infinispan.AdvancedCache; import org.infinispan.transaction.tm.BatchModeTransactionManager; import org.jboss.logging.Logger; import org.junit.Test; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.infinispan.util.CacheAdapter; import org.hibernate.cache.spi.GeneralDataRegion; import org.hibernate.cache.spi.QueryResultsRegion; import org.hibernate.cache.spi.Region; @@ -152,7 +152,7 @@ public abstract class 
AbstractGeneralDataRegionTestCase extends AbstractRegionIm cfg, getCacheTestSupport() ); - CacheAdapter localCache = getInfinispanCache( regionFactory ); + AdvancedCache localCache = getInfinispanCache( regionFactory ); // Sleep a bit to avoid concurrent FLUSH problem avoidConcurrentFlush(); @@ -170,7 +170,7 @@ public abstract class AbstractGeneralDataRegionTestCase extends AbstractRegionIm cfg, getCacheTestSupport() ); - CacheAdapter remoteCache = getInfinispanCache( regionFactory ); + AdvancedCache remoteCache = getInfinispanCache( regionFactory ); // Sleep a bit to avoid concurrent FLUSH problem avoidConcurrentFlush(); @@ -229,4 +229,4 @@ public abstract class AbstractGeneralDataRegionTestCase extends AbstractRegionIm log.error( e.getMessage(), e ); } } -} \ No newline at end of file +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractNonFunctionalTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractNonFunctionalTestCase.java index 7cda278b72..a5a8a5b601 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractNonFunctionalTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractNonFunctionalTestCase.java @@ -56,7 +56,7 @@ public abstract class AbstractNonFunctionalTestCase extends org.hibernate.testin preferIPv4Stack = System.getProperty(PREFER_IPV4STACK); System.setProperty(PREFER_IPV4STACK, "true"); jgroupsCfgFile = System.getProperty(JGROUPS_CFG_FILE); - System.setProperty(JGROUPS_CFG_FILE, "stacks/tcp.xml"); + System.setProperty(JGROUPS_CFG_FILE, "2lc-test-tcp.xml"); testSupport.setUp(); } @@ -98,4 +98,4 @@ public abstract class AbstractNonFunctionalTestCase extends org.hibernate.testin return keys.size(); } -} \ No newline at end of file +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractRegionImplTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractRegionImplTestCase.java index 27e4e393ea..13497fcdb8 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractRegionImplTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/AbstractRegionImplTestCase.java @@ -26,11 +26,11 @@ package org.hibernate.test.cache.infinispan; import java.util.Properties; import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.infinispan.util.CacheAdapter; import org.hibernate.cache.internal.CacheDataDescriptionImpl; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.Region; import org.hibernate.internal.util.compare.ComparableComparator; +import org.infinispan.AdvancedCache; /** * Base class for tests of Region implementations. 
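Since the test base classes now hand around `AdvancedCache` instead of the removed `CacheAdapter`, a concrete test case would typically satisfy `getInfinispanCache` by unwrapping the region through the `getCache()` accessor this patch adds to `BaseRegion`. A hypothetical helper along those lines (the cast and method name are assumptions, not part of the patch):

    import org.hibernate.cache.infinispan.impl.BaseRegion;
    import org.hibernate.cache.spi.Region;
    import org.infinispan.AdvancedCache;

    // Hypothetical test helper: expose the Infinispan cache underlying a region
    // so assertions can be made against its contents directly.
    public class RegionCacheAccessSketch {
        public static AdvancedCache cacheOf(Region region) {
            // BaseRegion.getCache() is the accessor introduced by this patch.
            return ((BaseRegion) region).getCache();
        }
    }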
@@ -40,7 +40,7 @@ import org.hibernate.internal.util.compare.ComparableComparator; */ public abstract class AbstractRegionImplTestCase extends AbstractNonFunctionalTestCase { - protected abstract CacheAdapter getInfinispanCache(InfinispanRegionFactory regionFactory); + protected abstract AdvancedCache getInfinispanCache(InfinispanRegionFactory regionFactory); protected abstract Region createRegion(InfinispanRegionFactory regionFactory, String regionName, Properties properties, CacheDataDescription cdd); diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/InfinispanRegionFactoryTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/InfinispanRegionFactoryTestCase.java index 89f04fb7da..c0c98d7052 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/InfinispanRegionFactoryTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/InfinispanRegionFactoryTestCase.java @@ -21,31 +21,6 @@ */ package org.hibernate.test.cache.infinispan; -import java.util.Properties; - -import org.infinispan.config.Configuration; -import org.infinispan.config.Configuration.CacheMode; -import org.infinispan.eviction.EvictionStrategy; -import org.infinispan.manager.DefaultCacheManager; -import org.infinispan.manager.EmbeddedCacheManager; -import org.junit.Test; - -import org.hibernate.cache.CacheException; -import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.infinispan.collection.CollectionRegionImpl; -import org.hibernate.cache.infinispan.entity.EntityRegionImpl; -import org.hibernate.cache.infinispan.query.QueryResultsRegionImpl; -import org.hibernate.cache.infinispan.timestamp.TimestampsRegionImpl; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.internal.RegionFactoryInitiator; -import org.hibernate.cache.spi.RegionFactory; -import org.hibernate.cfg.AvailableSettings; -import org.hibernate.engine.spi.SessionFactoryImplementor; -import org.hibernate.engine.transaction.jta.platform.internal.JBossStandAloneJtaPlatform; -import org.hibernate.service.ServiceRegistry; - -import org.hibernate.testing.ServiceRegistryBuilder; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; @@ -53,598 +28,556 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import java.util.Map; +import java.util.Properties; + +import javax.transaction.TransactionManager; + +import org.hibernate.cache.CacheException; +import org.hibernate.cache.infinispan.InfinispanRegionFactory; +import org.hibernate.cache.infinispan.collection.CollectionRegionImpl; +import org.hibernate.cache.infinispan.entity.EntityRegionImpl; +import org.hibernate.cache.infinispan.query.QueryResultsRegionImpl; +import org.hibernate.cache.infinispan.timestamp.TimestampsRegionImpl; +import org.hibernate.cache.infinispan.tm.HibernateTransactionManagerLookup; +import org.hibernate.cache.spi.RegionFactory; +import org.hibernate.cfg.Settings; +import org.hibernate.engine.spi.SessionFactoryImplementor; +import org.hibernate.engine.transaction.jta.platform.internal.AbstractJtaPlatform; +import org.hibernate.engine.transaction.jta.platform.internal.JBossStandAloneJtaPlatform; +import org.hibernate.service.ServiceRegistry; +import org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase; +import 
org.hibernate.testing.ServiceRegistryBuilder; +import org.infinispan.AdvancedCache; +import org.infinispan.configuration.cache.CacheMode; +import org.infinispan.configuration.cache.Configuration; +import org.infinispan.configuration.cache.ConfigurationBuilder; +import org.infinispan.eviction.EvictionStrategy; +import org.infinispan.manager.DefaultCacheManager; +import org.infinispan.manager.EmbeddedCacheManager; +import org.junit.Test; + /** * InfinispanRegionFactoryTestCase. * * @author Galder Zamarreño * @since 3.5 */ -public class InfinispanRegionFactoryTestCase { - @Test - public void testConfigurationProcessing() { - final String person = "com.acme.Person"; - final String addresses = "com.acme.Person.addresses"; - Properties p = new Properties(); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.cfg", "person-cache" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.eviction.strategy", "LRU" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.eviction.wake_up_interval", "2000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.eviction.max_entries", "5000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.cfg", "person-addresses-cache" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.expiration.lifespan", "120000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.expiration.max_idle", "60000" ); - p.setProperty( "hibernate.cache.infinispan.query.cfg", "my-query-cache" ); - p.setProperty( "hibernate.cache.infinispan.query.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.query.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.query.eviction.max_entries", "10000" ); +public class InfinispanRegionFactoryTestCase { - SessionFactoryImplementor sf = createSessionFactory(null, p ); - try { - InfinispanRegionFactory factory = (InfinispanRegionFactory) sf.getServiceRegistry() - .getService( RegionFactory.class ); + @Test + public void testConfigurationProcessing() { + final String person = "com.acme.Person"; + final String addresses = "com.acme.Person.addresses"; + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.cfg", "person-cache"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.eviction.strategy", "LRU"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.eviction.wake_up_interval", "2000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.eviction.max_entries", "5000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.cfg", "person-addresses-cache"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.expiration.lifespan", "120000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.expiration.max_idle", "60000"); + p.setProperty("hibernate.cache.infinispan.query.cfg", "my-query-cache"); + p.setProperty("hibernate.cache.infinispan.query.eviction.strategy", "LIRS"); + p.setProperty("hibernate.cache.infinispan.query.eviction.wake_up_interval", "3000"); + 
p.setProperty("hibernate.cache.infinispan.query.eviction.max_entries", "10000"); - assertEquals( "entity", factory.getTypeOverrides().get( "entity" ).getCacheName() ); - assertEquals( "entity", factory.getTypeOverrides().get( "collection" ).getCacheName() ); - assertEquals( "timestamps", factory.getTypeOverrides().get( "timestamps" ).getCacheName() ); + InfinispanRegionFactory factory = createRegionFactory(p); - assertEquals( "person-cache", factory.getTypeOverrides().get( person ).getCacheName() ); - assertEquals( EvictionStrategy.LRU, factory.getTypeOverrides().get( person ).getEvictionStrategy() ); - assertEquals( 2000, factory.getTypeOverrides().get( person ).getEvictionWakeUpInterval() ); - assertEquals( 5000, factory.getTypeOverrides().get( person ).getEvictionMaxEntries() ); - assertEquals( 60000, factory.getTypeOverrides().get( person ).getExpirationLifespan() ); - assertEquals( 30000, factory.getTypeOverrides().get( person ).getExpirationMaxIdle() ); + try { + assertEquals("entity", factory.getTypeOverrides().get("entity").getCacheName()); + assertEquals("entity", factory.getTypeOverrides().get("collection").getCacheName()); + assertEquals("timestamps", factory.getTypeOverrides().get("timestamps").getCacheName()); - assertEquals( "person-addresses-cache", factory.getTypeOverrides().get( addresses ).getCacheName() ); - assertEquals( 120000, factory.getTypeOverrides().get( addresses ).getExpirationLifespan() ); - assertEquals( 60000, factory.getTypeOverrides().get( addresses ).getExpirationMaxIdle() ); + assertEquals("person-cache", factory.getTypeOverrides().get(person).getCacheName()); + assertEquals(EvictionStrategy.LRU, factory.getTypeOverrides().get(person).getEvictionStrategy()); + assertEquals(2000, factory.getTypeOverrides().get(person).getEvictionWakeUpInterval()); + assertEquals(5000, factory.getTypeOverrides().get(person).getEvictionMaxEntries()); + assertEquals(60000, factory.getTypeOverrides().get(person).getExpirationLifespan()); + assertEquals(30000, factory.getTypeOverrides().get(person).getExpirationMaxIdle()); - assertEquals( "my-query-cache", factory.getTypeOverrides().get( "query" ).getCacheName() ); - assertEquals( EvictionStrategy.FIFO, factory.getTypeOverrides().get( "query" ).getEvictionStrategy() ); - assertEquals( 3000, factory.getTypeOverrides().get( "query" ).getEvictionWakeUpInterval() ); - assertEquals( 10000, factory.getTypeOverrides().get( "query" ).getEvictionMaxEntries() ); - } - finally { - sf.close(); - } - } + assertEquals("person-addresses-cache", factory.getTypeOverrides().get(addresses).getCacheName()); + assertEquals(120000, factory.getTypeOverrides().get(addresses).getExpirationLifespan()); + assertEquals(60000, factory.getTypeOverrides().get(addresses).getExpirationMaxIdle()); - @Test - public void testBuildEntityCollectionRegionsPersonPlusEntityCollectionOverrides() { - final String person = "com.acme.Person"; - final String address = "com.acme.Address"; - final String car = "com.acme.Car"; - final String addresses = "com.acme.Person.addresses"; - final String parts = "com.acme.Car.parts"; - Properties p = new Properties(); - // First option, cache defined for entity and overrides for generic entity data type and entity itself. 
- p.setProperty( "hibernate.cache.infinispan.com.acme.Person.cfg", "person-cache" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.eviction.strategy", "LRU" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.eviction.wake_up_interval", "2000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.eviction.max_entries", "5000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000" ); - p.setProperty( "hibernate.cache.infinispan.entity.cfg", "myentity-cache" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.max_entries", "20000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.cfg", "addresses-cache" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.eviction.wake_up_interval", "2500" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.eviction.max_entries", "5500" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.expiration.lifespan", "65000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.addresses.expiration.max_idle", "35000" ); - p.setProperty( "hibernate.cache.infinispan.collection.cfg", "mycollection-cache" ); - p.setProperty( "hibernate.cache.infinispan.collection.eviction.strategy", "LRU" ); - p.setProperty( "hibernate.cache.infinispan.collection.eviction.wake_up_interval", "3500" ); - p.setProperty( "hibernate.cache.infinispan.collection.eviction.max_entries", "25000" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); + assertEquals("my-query-cache", factory.getTypeOverrides().get("query").getCacheName()); + assertEquals(EvictionStrategy.LIRS, factory.getTypeOverrides().get("query").getEvictionStrategy()); + assertEquals(3000, factory.getTypeOverrides().get("query").getEvictionWakeUpInterval()); + assertEquals(10000, factory.getTypeOverrides().get("query").getEvictionMaxEntries()); + } finally { + factory.stop(); + } + } - try { - InfinispanRegionFactory factory = (InfinispanRegionFactory) sf.getServiceRegistry() - .getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - assertFalse( manager.getGlobalConfiguration().isExposeGlobalJmxStatistics() ); - assertNotNull( factory.getTypeOverrides().get( person ) ); - assertFalse( factory.getDefinedConfigurations().contains( person ) ); - assertNotNull( factory.getTypeOverrides().get( addresses ) ); - assertFalse( factory.getDefinedConfigurations().contains( addresses ) ); - CacheAdapter cache = null; + @Test + public void testBuildEntityCollectionRegionsPersonPlusEntityCollectionOverrides() { + final String person = "com.acme.Person"; + final String address = "com.acme.Address"; + final String car = "com.acme.Car"; + final String addresses = "com.acme.Person.addresses"; + final String parts = "com.acme.Car.parts"; + Properties p = new Properties(); + // First option, cache defined for entity and overrides for generic entity data type and entity itself. 
+ p.setProperty("hibernate.cache.infinispan.com.acme.Person.cfg", "person-cache"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.eviction.strategy", "LRU"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.eviction.wake_up_interval", "2000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.eviction.max_entries", "5000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000"); + p.setProperty("hibernate.cache.infinispan.entity.cfg", "myentity-cache"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.strategy", "LIRS"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.max_entries", "20000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.cfg", "addresses-cache"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.eviction.strategy", "LIRS"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.eviction.wake_up_interval", "2500"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.eviction.max_entries", "5500"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.expiration.lifespan", "65000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.addresses.expiration.max_idle", "35000"); + p.setProperty("hibernate.cache.infinispan.collection.cfg", "mycollection-cache"); + p.setProperty("hibernate.cache.infinispan.collection.eviction.strategy", "LRU"); + p.setProperty("hibernate.cache.infinispan.collection.eviction.wake_up_interval", "3500"); + p.setProperty("hibernate.cache.infinispan.collection.eviction.max_entries", "25000"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + EmbeddedCacheManager manager = factory.getCacheManager(); + assertFalse(manager.getCacheManagerConfiguration() + .globalJmxStatistics().enabled()); + assertNotNull(factory.getTypeOverrides().get(person)); + assertFalse(factory.getDefinedConfigurations().contains(person)); + assertNotNull(factory.getTypeOverrides().get(addresses)); + assertFalse(factory.getDefinedConfigurations().contains(addresses)); + AdvancedCache cache; - EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion( person, p, null ); - assertNotNull( factory.getTypeOverrides().get( person ) ); - assertTrue( factory.getDefinedConfigurations().contains( person ) ); - assertNull( factory.getTypeOverrides().get( address ) ); - cache = region.getCacheAdapter(); - Configuration cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.LRU, cacheCfg.getEvictionStrategy() ); - assertEquals( 2000, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 5000, cacheCfg.getEvictionMaxEntries() ); - assertEquals( 60000, cacheCfg.getExpirationLifespan() ); - assertEquals( 30000, cacheCfg.getExpirationMaxIdle() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); + EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion(person, p, null); + assertNotNull(factory.getTypeOverrides().get(person)); + assertTrue(factory.getDefinedConfigurations().contains(person)); + assertNull(factory.getTypeOverrides().get(address)); + cache = region.getCache(); + Configuration cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LRU, cacheCfg.eviction().strategy()); + assertEquals(2000, cacheCfg.expiration().wakeUpInterval()); + 
assertEquals(5000, cacheCfg.eviction().maxEntries()); + assertEquals(60000, cacheCfg.expiration().lifespan()); + assertEquals(30000, cacheCfg.expiration().maxIdle()); + assertFalse(cacheCfg.jmxStatistics().enabled()); - region = (EntityRegionImpl) factory.buildEntityRegion( address, p, null ); - assertNotNull( factory.getTypeOverrides().get( person ) ); - assertTrue( factory.getDefinedConfigurations().contains( person ) ); - assertNull( factory.getTypeOverrides().get( address ) ); - cache = region.getCacheAdapter(); - cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.FIFO, cacheCfg.getEvictionStrategy() ); - assertEquals( 3000, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 20000, cacheCfg.getEvictionMaxEntries() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); + region = (EntityRegionImpl) factory.buildEntityRegion(address, p, null); + assertNotNull(factory.getTypeOverrides().get(person)); + assertTrue(factory.getDefinedConfigurations().contains(person)); + assertNull(factory.getTypeOverrides().get(address)); + cache = region.getCache(); + cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LIRS, cacheCfg.eviction().strategy()); + assertEquals(3000, cacheCfg.expiration().wakeUpInterval()); + assertEquals(20000, cacheCfg.eviction().maxEntries()); + assertFalse(cacheCfg.jmxStatistics().enabled()); - region = (EntityRegionImpl) factory.buildEntityRegion( car, p, null ); - assertNotNull( factory.getTypeOverrides().get( person ) ); - assertTrue( factory.getDefinedConfigurations().contains( person ) ); - assertNull( factory.getTypeOverrides().get( address ) ); - cache = region.getCacheAdapter(); - cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.FIFO, cacheCfg.getEvictionStrategy() ); - assertEquals( 3000, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 20000, cacheCfg.getEvictionMaxEntries() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); + region = (EntityRegionImpl) factory.buildEntityRegion(car, p, null); + assertNotNull(factory.getTypeOverrides().get(person)); + assertTrue(factory.getDefinedConfigurations().contains(person)); + assertNull(factory.getTypeOverrides().get(address)); + cache = region.getCache(); + cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LIRS, cacheCfg.eviction().strategy()); + assertEquals(3000, cacheCfg.expiration().wakeUpInterval()); + assertEquals(20000, cacheCfg.eviction().maxEntries()); + assertFalse(cacheCfg.jmxStatistics().enabled()); - CollectionRegionImpl collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion( - addresses, - p, - null - ); - assertNotNull( factory.getTypeOverrides().get( addresses ) ); - assertTrue( factory.getDefinedConfigurations().contains( person ) ); - assertNull( factory.getTypeOverrides().get( parts ) ); - cache = collectionRegion.getCacheAdapter(); - cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.FIFO, cacheCfg.getEvictionStrategy() ); - assertEquals( 2500, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 5500, cacheCfg.getEvictionMaxEntries() ); - assertEquals( 65000, cacheCfg.getExpirationLifespan() ); - assertEquals( 35000, cacheCfg.getExpirationMaxIdle() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); + CollectionRegionImpl collectionRegion = (CollectionRegionImpl) + factory.buildCollectionRegion(addresses, p, null); + assertNotNull(factory.getTypeOverrides().get(addresses)); + assertTrue(factory.getDefinedConfigurations().contains(person)); + 
assertNull(factory.getTypeOverrides().get(parts)); + cache = collectionRegion .getCache(); + cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LIRS, cacheCfg.eviction().strategy()); + assertEquals(2500, cacheCfg.expiration().wakeUpInterval()); + assertEquals(5500, cacheCfg.eviction().maxEntries()); + assertEquals(65000, cacheCfg.expiration().lifespan()); + assertEquals(35000, cacheCfg.expiration().maxIdle()); + assertFalse(cacheCfg.jmxStatistics().enabled()); - collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion( parts, p, null ); - assertNotNull( factory.getTypeOverrides().get( addresses ) ); - assertTrue( factory.getDefinedConfigurations().contains( addresses ) ); - assertNull( factory.getTypeOverrides().get( parts ) ); - cache = collectionRegion.getCacheAdapter(); - cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.LRU, cacheCfg.getEvictionStrategy() ); - assertEquals( 3500, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 25000, cacheCfg.getEvictionMaxEntries() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); + collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion(parts, p, null); + assertNotNull(factory.getTypeOverrides().get(addresses)); + assertTrue(factory.getDefinedConfigurations().contains(addresses)); + assertNull(factory.getTypeOverrides().get(parts)); + cache = collectionRegion.getCache(); + cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LRU, cacheCfg.eviction().strategy()); + assertEquals(3500, cacheCfg.expiration().wakeUpInterval()); + assertEquals(25000, cacheCfg.eviction().maxEntries()); + assertFalse(cacheCfg.jmxStatistics().enabled()); - collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion( parts, p, null ); - assertNotNull( factory.getTypeOverrides().get( addresses ) ); - assertTrue( factory.getDefinedConfigurations().contains( addresses ) ); - assertNull( factory.getTypeOverrides().get( parts ) ); - cache = collectionRegion.getCacheAdapter(); - cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.LRU, cacheCfg.getEvictionStrategy() ); - assertEquals( 3500, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 25000, cacheCfg.getEvictionMaxEntries() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); - } - finally { - sf.close(); - } - } + collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion(parts, p, null); + assertNotNull(factory.getTypeOverrides().get(addresses)); + assertTrue(factory.getDefinedConfigurations().contains(addresses)); + assertNull(factory.getTypeOverrides().get(parts)); + cache = collectionRegion.getCache(); + cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LRU, cacheCfg.eviction().strategy()); + assertEquals(3500, cacheCfg.expiration().wakeUpInterval()); + assertEquals(25000, cacheCfg.eviction().maxEntries()); + assertFalse(cacheCfg.jmxStatistics().enabled()); + } finally { + factory.stop(); + } + } - @Test - public void testBuildEntityCollectionRegionOverridesOnly() { - CacheAdapter cache; - Properties p = new Properties(); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.max_entries", "30000" ); - p.setProperty( "hibernate.cache.infinispan.collection.eviction.strategy", "LRU" ); - p.setProperty( "hibernate.cache.infinispan.collection.eviction.wake_up_interval", 
"3500" ); - p.setProperty( "hibernate.cache.infinispan.collection.eviction.max_entries", "35000" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - factory.getCacheManager(); - try { - EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion( "com.acme.Address", p, null ); - assertNull( factory.getTypeOverrides().get( "com.acme.Address" ) ); - cache = region.getCacheAdapter(); - Configuration cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.FIFO, cacheCfg.getEvictionStrategy() ); - assertEquals( 3000, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 30000, cacheCfg.getEvictionMaxEntries() ); - assertEquals( 100000, cacheCfg.getExpirationMaxIdle() ); + @Test + public void testBuildEntityCollectionRegionOverridesOnly() { + AdvancedCache cache; + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.entity.eviction.strategy", "LIRS"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.max_entries", "30000"); + p.setProperty("hibernate.cache.infinispan.collection.eviction.strategy", "LRU"); + p.setProperty("hibernate.cache.infinispan.collection.eviction.wake_up_interval", "3500"); + p.setProperty("hibernate.cache.infinispan.collection.eviction.max_entries", "35000"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + factory.getCacheManager(); + EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion("com.acme.Address", p, null); + assertNull(factory.getTypeOverrides().get("com.acme.Address")); + cache = region.getCache(); + Configuration cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LIRS, cacheCfg.eviction().strategy()); + assertEquals(3000, cacheCfg.expiration().wakeUpInterval()); + assertEquals(30000, cacheCfg.eviction().maxEntries()); + // Max idle value comes from base XML configuration + assertEquals(100000, cacheCfg.expiration().maxIdle()); - CollectionRegionImpl collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion( - "com.acme.Person.addresses", - p, - null - ); - assertNull( factory.getTypeOverrides().get( "com.acme.Person.addresses" ) ); - cache = collectionRegion.getCacheAdapter(); - cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.LRU, cacheCfg.getEvictionStrategy() ); - assertEquals( 3500, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 35000, cacheCfg.getEvictionMaxEntries() ); - assertEquals( 100000, cacheCfg.getExpirationMaxIdle() ); - } - finally { - sf.close(); - } - } + CollectionRegionImpl collectionRegion = (CollectionRegionImpl) + factory.buildCollectionRegion("com.acme.Person.addresses", p, null); + assertNull(factory.getTypeOverrides().get("com.acme.Person.addresses")); + cache = collectionRegion.getCache(); + cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LRU, cacheCfg.eviction().strategy()); + assertEquals(3500, cacheCfg.expiration().wakeUpInterval()); + assertEquals(35000, cacheCfg.eviction().maxEntries()); + assertEquals(100000, cacheCfg.expiration().maxIdle()); + } finally { + factory.stop(); + } + } + @Test + public void testBuildEntityRegionPersonPlusEntityOverridesWithoutCfg() { + final String person = "com.acme.Person"; + Properties p = new Properties(); + // Third option, no cache defined for entity and overrides for generic 
entity data type and entity itself. + p.setProperty("hibernate.cache.infinispan.com.acme.Person.eviction.strategy", "LRU"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000"); + p.setProperty("hibernate.cache.infinispan.entity.cfg", "myentity-cache"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.strategy", "FIFO"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.max_entries", "10000"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + factory.getCacheManager(); + assertNotNull(factory.getTypeOverrides().get(person)); + assertFalse(factory.getDefinedConfigurations().contains(person)); + EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion(person, p, null); + assertNotNull(factory.getTypeOverrides().get(person)); + assertTrue(factory.getDefinedConfigurations().contains(person)); + AdvancedCache cache = region.getCache(); + Configuration cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LRU, cacheCfg.eviction().strategy()); + assertEquals(3000, cacheCfg.expiration().wakeUpInterval()); + assertEquals(10000, cacheCfg.eviction().maxEntries()); + assertEquals(60000, cacheCfg.expiration().lifespan()); + assertEquals(30000, cacheCfg.expiration().maxIdle()); + } finally { + factory.stop(); + } + } - @Test - public void testBuildEntityRegionPersonPlusEntityOverridesWithoutCfg() { - final String person = "com.acme.Person"; - Properties p = new Properties(); - // Third option, no cache defined for entity and overrides for generic entity data type and entity itself. 
- p.setProperty( "hibernate.cache.infinispan.com.acme.Person.eviction.strategy", "LRU" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000" ); - p.setProperty( "hibernate.cache.infinispan.entity.cfg", "myentity-cache" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.max_entries", "10000" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - try { - assertNotNull( factory.getTypeOverrides().get( person ) ); - assertFalse( factory.getDefinedConfigurations().contains( person ) ); - EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion( person, p, null ); - assertNotNull( factory.getTypeOverrides().get( person ) ); - assertTrue( factory.getDefinedConfigurations().contains( person ) ); - CacheAdapter cache = region.getCacheAdapter(); - Configuration cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.LRU, cacheCfg.getEvictionStrategy() ); - assertEquals( 3000, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 10000, cacheCfg.getEvictionMaxEntries() ); - assertEquals( 60000, cacheCfg.getExpirationLifespan() ); - assertEquals( 30000, cacheCfg.getExpirationMaxIdle() ); - } - finally { - sf.close(); - } - } + @Test + public void testTimestampValidation() { + Properties p = new Properties(); + final DefaultCacheManager manager = new DefaultCacheManager(); + InfinispanRegionFactory factory = createRegionFactory(manager, p); + ConfigurationBuilder builder = new ConfigurationBuilder(); + builder.clustering().cacheMode(CacheMode.INVALIDATION_SYNC); + manager.defineConfiguration("timestamps", builder.build()); + try { + factory.start(null, p); + fail("Should have failed saying that invalidation is not allowed for timestamp caches."); + } catch(CacheException ce) { + } + } - @Test - public void testTimestampValidation() { - Properties p = new Properties(); - final DefaultCacheManager manager = new DefaultCacheManager(); - SessionFactoryImplementor sf = null; - try { - sf = createSessionFactory( manager, p ); - InfinispanRegionFactory factory = (InfinispanRegionFactory) sf.getServiceRegistry() - .getService( RegionFactory.class ); - Configuration config = new Configuration(); - config.setCacheMode( CacheMode.INVALIDATION_SYNC ); - manager.defineConfiguration( "timestamps", config ); + @Test + public void testBuildDefaultTimestampsRegion() { + final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; + Properties p = new Properties(); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + assertTrue(factory.getDefinedConfigurations().contains("timestamps")); + assertTrue(factory.getTypeOverrides().get("timestamps") + .getCacheName().equals("timestamps")); + TimestampsRegionImpl region = (TimestampsRegionImpl) + factory.buildTimestampsRegion(timestamps, p); + AdvancedCache cache = region.getCache(); + Configuration cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.NONE, cacheCfg.eviction().strategy()); + assertEquals(CacheMode.REPL_ASYNC, cacheCfg.clustering().cacheMode()); + 
assertTrue(cacheCfg.storeAsBinary().enabled()); + assertFalse(cacheCfg.jmxStatistics().enabled()); + } finally { + factory.stop(); + } + } - factory.start( null, p ); - fail( "Should have failed saying that invalidation is not allowed for timestamp caches." ); - } - catch ( CacheException ce ) { - } - finally { - if ( sf != null ) { - sf.close(); - } - } - } + @Test + public void testBuildDiffCacheNameTimestampsRegion() { + final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.timestamps.cfg", "unrecommended-timestamps"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + EmbeddedCacheManager manager = factory.getCacheManager(); + assertFalse(factory.getDefinedConfigurations().contains("timestamp")); + assertTrue(factory.getDefinedConfigurations().contains("unrecommended-timestamps")); + assertTrue(factory.getTypeOverrides().get("timestamps").getCacheName().equals("unrecommended-timestamps")); + ConfigurationBuilder builder = new ConfigurationBuilder(); + builder.clustering().stateTransfer().fetchInMemoryState(true); + builder.clustering().cacheMode(CacheMode.REPL_SYNC); + manager.defineConfiguration("unrecommended-timestamps", builder.build()); + TimestampsRegionImpl region = (TimestampsRegionImpl) factory.buildTimestampsRegion(timestamps, p); + AdvancedCache cache = region.getCache(); + Configuration cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.NONE, cacheCfg.eviction().strategy()); + assertEquals(CacheMode.REPL_SYNC, cacheCfg.clustering().cacheMode()); + assertFalse(cacheCfg.storeAsBinary().enabled()); + assertFalse(cacheCfg.jmxStatistics().enabled()); + } finally { + factory.stop(); + } + } - @Test - public void testBuildDefaultTimestampsRegion() { - final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; - Properties p = new Properties(); - SessionFactoryImplementor sf = createSessionFactory( null, p ); + @Test + public void testBuildTimestamRegionWithCacheNameOverride() { + final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.timestamps.cfg", "mytimestamps-cache"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + factory.buildTimestampsRegion(timestamps, p); + assertTrue(factory.getDefinedConfigurations().contains("mytimestamps-cache")); + } finally { + factory.stop(); + } + } - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - try { - assertTrue( factory.getDefinedConfigurations().contains( "timestamps" ) ); - assertTrue( factory.getTypeOverrides().get( "timestamps" ).getCacheName().equals( "timestamps" ) ); - Configuration config = new Configuration(); - config.setFetchInMemoryState( false ); - manager.defineConfiguration( "timestamps", config ); - TimestampsRegionImpl region = (TimestampsRegionImpl) factory.buildTimestampsRegion( timestamps, p ); - CacheAdapter cache = region.getCacheAdapter(); - Configuration cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.NONE, cacheCfg.getEvictionStrategy() ); - assertEquals( CacheMode.REPL_ASYNC, cacheCfg.getCacheMode() ); - assertTrue( cacheCfg.isUseLazyDeserialization() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); - } - finally { - sf.close(); - } - } + @Test + public void 
testBuildTimestamRegionWithFifoEvictionOverride() { + final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.timestamps.cfg", "mytimestamps-cache"); + p.setProperty("hibernate.cache.infinispan.timestamps.eviction.strategy", "FIFO"); + p.setProperty("hibernate.cache.infinispan.timestamps.eviction.wake_up_interval", "3000"); + p.setProperty("hibernate.cache.infinispan.timestamps.eviction.max_entries", "10000"); + InfinispanRegionFactory factory = null; + try { + factory = createRegionFactory(p); + factory.buildTimestampsRegion(timestamps, p); + assertTrue(factory.getDefinedConfigurations().contains("mytimestamps-cache")); + fail("Should fail cos no eviction configurations are allowed for timestamp caches"); + } catch(CacheException ce) { + } finally { + if (factory != null) factory.stop(); + } + } - @Test - public void testBuildDiffCacheNameTimestampsRegion() { - final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; - Properties p = new Properties(); - p.setProperty( "hibernate.cache.infinispan.timestamps.cfg", "unrecommended-timestamps" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); + @Test + public void testBuildTimestamRegionWithNoneEvictionOverride() { + final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.timestamps.cfg", "timestamps-none-eviction"); + p.setProperty("hibernate.cache.infinispan.timestamps.eviction.strategy", "NONE"); + p.setProperty("hibernate.cache.infinispan.timestamps.eviction.wake_up_interval", "3000"); + p.setProperty("hibernate.cache.infinispan.timestamps.eviction.max_entries", "0"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + factory.buildTimestampsRegion(timestamps, p); + assertTrue(factory.getDefinedConfigurations().contains("timestamps-none-eviction")); + } finally { + factory.stop(); + } + } - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - try { - assertFalse( factory.getDefinedConfigurations().contains( "timestamp" ) ); - assertTrue( factory.getDefinedConfigurations().contains( "unrecommended-timestamps" ) ); - assertTrue( - factory.getTypeOverrides() - .get( "timestamps" ) - .getCacheName() - .equals( "unrecommended-timestamps" ) - ); - Configuration config = new Configuration(); - config.setFetchInMemoryState( false ); - config.setCacheMode( CacheMode.REPL_SYNC ); - manager.defineConfiguration( "unrecommended-timestamps", config ); - TimestampsRegionImpl region = (TimestampsRegionImpl) factory.buildTimestampsRegion( timestamps, p ); - CacheAdapter cache = region.getCacheAdapter(); - Configuration cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.NONE, cacheCfg.getEvictionStrategy() ); - assertEquals( CacheMode.REPL_SYNC, cacheCfg.getCacheMode() ); - assertFalse( cacheCfg.isUseLazyDeserialization() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); - } - finally { - sf.close(); - } - } + @Test + public void testBuildQueryRegion() { + final String query = "org.hibernate.cache.internal.StandardQueryCache"; + Properties p = new Properties(); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + assertTrue(factory.getDefinedConfigurations().contains("local-query")); + QueryResultsRegionImpl region = (QueryResultsRegionImpl) 
factory.buildQueryResultsRegion(query, p); + AdvancedCache cache = region.getCache(); + Configuration cacheCfg = cache.getCacheConfiguration(); + assertEquals(CacheMode.LOCAL, cacheCfg.clustering().cacheMode()); + assertFalse(cacheCfg.jmxStatistics().enabled()); + } finally { + factory.stop(); + } + } - @Test - public void testBuildTimestamRegionWithCacheNameOverride() { - final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; - Properties p = new Properties(); - p.setProperty( "hibernate.cache.infinispan.timestamps.cfg", "mytimestamps-cache" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - try { - factory.buildTimestampsRegion( timestamps, p ); - assertTrue( factory.getDefinedConfigurations().contains( "mytimestamps-cache" ) ); - } - finally { - sf.close(); - } - } + @Test + public void testBuildQueryRegionWithCustomRegionName() { + final String queryRegionName = "myquery"; + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.myquery.cfg", "timestamps-none-eviction"); + p.setProperty("hibernate.cache.infinispan.myquery.eviction.strategy", "LIRS"); + p.setProperty("hibernate.cache.infinispan.myquery.eviction.wake_up_interval", "2222"); + p.setProperty("hibernate.cache.infinispan.myquery.eviction.max_entries", "11111"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + assertTrue(factory.getDefinedConfigurations().contains("local-query")); + QueryResultsRegionImpl region = (QueryResultsRegionImpl) factory.buildQueryResultsRegion(queryRegionName, p); + assertNotNull(factory.getTypeOverrides().get(queryRegionName)); + assertTrue(factory.getDefinedConfigurations().contains(queryRegionName)); + AdvancedCache cache = region.getCache(); + Configuration cacheCfg = cache.getCacheConfiguration(); + assertEquals(EvictionStrategy.LIRS, cacheCfg.eviction().strategy()); + assertEquals(2222, cacheCfg.expiration().wakeUpInterval()); + assertEquals(11111, cacheCfg.eviction().maxEntries()); + } finally { + factory.stop(); + } + } + @Test + public void testEnableStatistics() { + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.statistics", "true"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000"); + p.setProperty("hibernate.cache.infinispan.entity.cfg", "myentity-cache"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.strategy", "FIFO"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.max_entries", "10000"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + EmbeddedCacheManager manager = factory.getCacheManager(); + assertTrue(manager.getCacheManagerConfiguration().globalJmxStatistics().enabled()); + EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion("com.acme.Address", p, null); + AdvancedCache cache = region.getCache(); + assertTrue(factory.getTypeOverrides().get("entity").isExposeStatistics()); + assertTrue(cache.getCacheConfiguration().jmxStatistics().enabled()); - @Test - public void testBuildTimestamRegionWithFifoEvictionOverride() { - final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; - Properties p = new Properties(); - p.setProperty( 
"hibernate.cache.infinispan.timestamps.cfg", "mytimestamps-cache" ); - p.setProperty( "hibernate.cache.infinispan.timestamps.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.timestamps.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.timestamps.eviction.max_entries", "10000" ); - InfinispanRegionFactory factory = null; - SessionFactoryImplementor sf = null; - try { - sf = createSessionFactory( null, p ); - factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - factory.buildTimestampsRegion( timestamps, p ); - assertTrue( factory.getDefinedConfigurations().contains( "mytimestamps-cache" ) ); - fail( "Should fail cos no eviction configurations are allowed for timestamp caches" ); - } - catch ( CacheException ce ) { - } - finally { - if ( sf != null ) { - sf.close(); - } - } - } + region = (EntityRegionImpl) factory.buildEntityRegion("com.acme.Person", p, null); + cache = region.getCache(); + assertTrue(factory.getTypeOverrides().get("com.acme.Person").isExposeStatistics()); + assertTrue(cache.getCacheConfiguration().jmxStatistics().enabled()); - @Test - public void testBuildTimestamRegionWithNoneEvictionOverride() { - final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; - Properties p = new Properties(); - p.setProperty( "hibernate.cache.infinispan.timestamps.cfg", "timestamps-none-eviction" ); - p.setProperty( "hibernate.cache.infinispan.timestamps.eviction.strategy", "NONE" ); - p.setProperty( "hibernate.cache.infinispan.timestamps.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.timestamps.eviction.max_entries", "10000" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - manager.getGlobalConfiguration().setTransportClass( null ); - try { - factory.buildTimestampsRegion( timestamps, p ); - assertTrue( factory.getDefinedConfigurations().contains( "timestamps-none-eviction" ) ); - } - finally { - sf.close(); - } - } + final String query = "org.hibernate.cache.internal.StandardQueryCache"; + QueryResultsRegionImpl queryRegion = (QueryResultsRegionImpl) + factory.buildQueryResultsRegion(query, p); + cache = queryRegion.getCache(); + assertTrue(factory.getTypeOverrides().get("query").isExposeStatistics()); + assertTrue(cache.getCacheConfiguration().jmxStatistics().enabled()); - @Test - public void testBuildQueryRegion() { - final String query = "org.hibernate.cache.internal.StandardQueryCache"; - Properties p = new Properties(); - SessionFactoryImplementor sf = createSessionFactory( null, p ); - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - manager.getGlobalConfiguration().setTransportClass( null ); - try { - assertTrue( factory.getDefinedConfigurations().contains( "local-query" ) ); - QueryResultsRegionImpl region = (QueryResultsRegionImpl) factory.buildQueryResultsRegion( query, p ); - CacheAdapter cache = region.getCacheAdapter(); - Configuration cacheCfg = cache.getConfiguration(); - assertEquals( CacheMode.LOCAL, cacheCfg.getCacheMode() ); - assertFalse( cacheCfg.isExposeJmxStatistics() ); - } - finally { - sf.close(); - } - } + final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; + 
ConfigurationBuilder builder = new ConfigurationBuilder(); + builder.clustering().stateTransfer().fetchInMemoryState(true); + manager.defineConfiguration("timestamps", builder.build()); + TimestampsRegionImpl timestampsRegion = (TimestampsRegionImpl) + factory.buildTimestampsRegion(timestamps, p); + cache = timestampsRegion.getCache(); + assertTrue(factory.getTypeOverrides().get("timestamps").isExposeStatistics()); + assertTrue(cache.getCacheConfiguration().jmxStatistics().enabled()); - @Test - public void testBuildQueryRegionWithCustomRegionName() { - final String queryRegionName = "myquery"; - Properties p = new Properties(); - p.setProperty( "hibernate.cache.infinispan.myquery.cfg", "timestamps-none-eviction" ); - p.setProperty( "hibernate.cache.infinispan.myquery.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.myquery.eviction.wake_up_interval", "2222" ); - p.setProperty( "hibernate.cache.infinispan.myquery.eviction.max_entries", "11111" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); + CollectionRegionImpl collectionRegion = (CollectionRegionImpl) + factory.buildCollectionRegion("com.acme.Person.addresses", p, null); + cache = collectionRegion.getCache(); + assertTrue(factory.getTypeOverrides().get("collection").isExposeStatistics()); + assertTrue(cache.getCacheConfiguration().jmxStatistics().enabled()); + } finally { + factory.stop(); + } + } - InfinispanRegionFactory factory = ( InfinispanRegionFactory )sf.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - manager.getGlobalConfiguration().setTransportClass( null ); - try { - assertTrue( factory.getDefinedConfigurations().contains( "local-query" ) ); - QueryResultsRegionImpl region = (QueryResultsRegionImpl) factory.buildQueryResultsRegion( - queryRegionName, - p - ); - assertNotNull( factory.getTypeOverrides().get( queryRegionName ) ); - assertTrue( factory.getDefinedConfigurations().contains( queryRegionName ) ); - CacheAdapter cache = region.getCacheAdapter(); - Configuration cacheCfg = cache.getConfiguration(); - assertEquals( EvictionStrategy.FIFO, cacheCfg.getEvictionStrategy() ); - assertEquals( 2222, cacheCfg.getEvictionWakeUpInterval() ); - assertEquals( 11111, cacheCfg.getEvictionMaxEntries() ); - } - finally { - sf.close(); - } - } + @Test + public void testDisableStatistics() { + Properties p = new Properties(); + p.setProperty("hibernate.cache.infinispan.statistics", "false"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000"); + p.setProperty("hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000"); + p.setProperty("hibernate.cache.infinispan.entity.cfg", "myentity-cache"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.strategy", "FIFO"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000"); + p.setProperty("hibernate.cache.infinispan.entity.eviction.max_entries", "10000"); + InfinispanRegionFactory factory = createRegionFactory(p); + try { + EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion("com.acme.Address", p, null); + AdvancedCache cache = region.getCache(); + assertFalse(factory.getTypeOverrides().get("entity").isExposeStatistics()); + assertFalse(cache.getCacheConfiguration().jmxStatistics().enabled()); - @Test - public void testEnableStatistics() { - Properties p = new Properties(); - p.setProperty( InfinispanRegionFactory.INFINISPAN_GLOBAL_STATISTICS_PROP, "true" ); - 
p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000" ); - p.setProperty( "hibernate.cache.infinispan.entity.cfg", "myentity-cache" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.max_entries", "10000" ); - SessionFactoryImplementor sf = createSessionFactory( null, p ); - InfinispanRegionFactory factory = (InfinispanRegionFactory)sf.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - try { - assertTrue( manager.getGlobalConfiguration().isExposeGlobalJmxStatistics() ); - EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion( "com.acme.Address", p, null ); - CacheAdapter cache = region.getCacheAdapter(); - assertTrue( factory.getTypeOverrides().get( "entity" ).isExposeStatistics() ); - assertTrue( cache.getConfiguration().isExposeJmxStatistics() ); + region = (EntityRegionImpl) factory.buildEntityRegion("com.acme.Person", p, null); + cache = region.getCache(); + assertFalse(factory.getTypeOverrides().get("com.acme.Person").isExposeStatistics()); + assertFalse(cache.getCacheConfiguration().jmxStatistics().enabled()); - region = (EntityRegionImpl) factory.buildEntityRegion( "com.acme.Person", p, null ); - cache = region.getCacheAdapter(); - assertTrue( factory.getTypeOverrides().get( "com.acme.Person" ).isExposeStatistics() ); - assertTrue( cache.getConfiguration().isExposeJmxStatistics() ); + final String query = "org.hibernate.cache.internal.StandardQueryCache"; + QueryResultsRegionImpl queryRegion = (QueryResultsRegionImpl) factory.buildQueryResultsRegion(query, p); + cache = queryRegion.getCache(); + assertFalse(factory.getTypeOverrides().get("query").isExposeStatistics()); + assertFalse(cache.getCacheConfiguration().jmxStatistics().enabled()); - final String query = "org.hibernate.cache.internal.StandardQueryCache"; - QueryResultsRegionImpl queryRegion = (QueryResultsRegionImpl) factory.buildQueryResultsRegion( query, p ); - cache = queryRegion.getCacheAdapter(); - assertTrue( factory.getTypeOverrides().get( "query" ).isExposeStatistics() ); - assertTrue( cache.getConfiguration().isExposeJmxStatistics() ); + final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; + ConfigurationBuilder builder = new ConfigurationBuilder(); + builder.clustering().stateTransfer().fetchInMemoryState(true); + factory.getCacheManager().defineConfiguration("timestamps", builder.build()); + TimestampsRegionImpl timestampsRegion = (TimestampsRegionImpl) + factory.buildTimestampsRegion(timestamps, p); + cache = timestampsRegion.getCache(); + assertFalse(factory.getTypeOverrides().get("timestamps").isExposeStatistics()); + assertFalse(cache.getCacheConfiguration().jmxStatistics().enabled()); - final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; - Configuration config = new Configuration(); - config.setFetchInMemoryState( false ); - manager.defineConfiguration( "timestamps", config ); - TimestampsRegionImpl timestampsRegion = (TimestampsRegionImpl) factory.buildTimestampsRegion( - timestamps, - p - ); - cache = timestampsRegion.getCacheAdapter(); - assertTrue( factory.getTypeOverrides().get( "timestamps" ).isExposeStatistics() ); - assertTrue( 
cache.getConfiguration().isExposeJmxStatistics() ); + CollectionRegionImpl collectionRegion = (CollectionRegionImpl) + factory.buildCollectionRegion("com.acme.Person.addresses", p, null); + cache = collectionRegion.getCache(); + assertFalse(factory.getTypeOverrides().get("collection").isExposeStatistics()); + assertFalse(cache.getCacheConfiguration().jmxStatistics().enabled()); + } finally { + factory.stop(); + } + } + private InfinispanRegionFactory createRegionFactory(Properties p) { + return createRegionFactory(null, p); + } - CollectionRegionImpl collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion( - "com.acme.Person.addresses", - p, - null - ); - cache = collectionRegion.getCacheAdapter(); - assertTrue( factory.getTypeOverrides().get( "collection" ).isExposeStatistics() ); - assertTrue( cache.getConfiguration().isExposeJmxStatistics() ); - } - finally { - sf.close(); - } - } + private InfinispanRegionFactory createRegionFactory(final EmbeddedCacheManager manager, Properties p) { + final InfinispanRegionFactory factory = new SingleNodeTestCase.TestInfinispanRegionFactory() { - @Test - public void testDisableStatistics() { - Properties p = new Properties(); - p.setProperty( "hibernate.cache.infinispan.statistics", "false" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.lifespan", "60000" ); - p.setProperty( "hibernate.cache.infinispan.com.acme.Person.expiration.max_idle", "30000" ); - p.setProperty( "hibernate.cache.infinispan.entity.cfg", "myentity-cache" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.strategy", "FIFO" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.wake_up_interval", "3000" ); - p.setProperty( "hibernate.cache.infinispan.entity.eviction.max_entries", "10000" ); - SessionFactoryImplementor sessionFactoryImplementor = createSessionFactory( null, p ); - InfinispanRegionFactory factory = ( InfinispanRegionFactory )sessionFactoryImplementor.getServiceRegistry().getService( RegionFactory.class ); - EmbeddedCacheManager manager = factory.getCacheManager(); - try { - assertFalse( manager.getGlobalConfiguration().isExposeGlobalJmxStatistics() ); - EntityRegionImpl region = (EntityRegionImpl) factory.buildEntityRegion( "com.acme.Address", p, null ); - CacheAdapter cache = region.getCacheAdapter(); - assertFalse( factory.getTypeOverrides().get( "entity" ).isExposeStatistics() ); - assertFalse( cache.getConfiguration().isExposeJmxStatistics() ); + @Override + protected org.infinispan.transaction.lookup.TransactionManagerLookup createTransactionManagerLookup(ServiceRegistry sr) { + return new HibernateTransactionManagerLookup(null, null) { + @Override + public TransactionManager getTransactionManager() throws Exception { + AbstractJtaPlatform jta = new JBossStandAloneJtaPlatform(); + jta.injectServices(ServiceRegistryBuilder.buildServiceRegistry()); + return jta.getTransactionManager(); + } + }; + } - region = (EntityRegionImpl) factory.buildEntityRegion( "com.acme.Person", p, null ); - cache = region.getCacheAdapter(); - assertFalse( factory.getTypeOverrides().get( "com.acme.Person" ).isExposeStatistics() ); - assertFalse( cache.getConfiguration().isExposeJmxStatistics() ); + @Override + protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException { + if (manager != null) + return manager; + else + return super.createCacheManager(properties); + } - final String query = "org.hibernate.cache.internal.StandardQueryCache"; - QueryResultsRegionImpl queryRegion = 
(QueryResultsRegionImpl) factory.buildQueryResultsRegion( query, p ); - cache = queryRegion.getCacheAdapter(); - assertFalse( factory.getTypeOverrides().get( "query" ).isExposeStatistics() ); - assertFalse( cache.getConfiguration().isExposeJmxStatistics() ); - - final String timestamps = "org.hibernate.cache.spi.UpdateTimestampsCache"; - Configuration config = new Configuration(); - config.setFetchInMemoryState( false ); - manager.defineConfiguration( "timestamps", config ); - TimestampsRegionImpl timestampsRegion = (TimestampsRegionImpl) factory.buildTimestampsRegion( - timestamps, - p - ); - cache = timestampsRegion.getCacheAdapter(); - assertFalse( factory.getTypeOverrides().get( "timestamps" ).isExposeStatistics() ); - assertFalse( cache.getConfiguration().isExposeJmxStatistics() ); - - CollectionRegionImpl collectionRegion = (CollectionRegionImpl) factory.buildCollectionRegion( - "com.acme.Person.addresses", - p, - null - ); - cache = collectionRegion.getCacheAdapter(); - assertFalse( factory.getTypeOverrides().get( "collection" ).isExposeStatistics() ); - assertFalse( cache.getConfiguration().isExposeJmxStatistics() ); - } - finally { - sessionFactoryImplementor.close(); - } - } - - private SessionFactoryImplementor createSessionFactory(final EmbeddedCacheManager manager, Properties p) { - Properties properties = new Properties(); - properties.putAll( p ); - properties.put( - RegionFactoryInitiator.IMPL_NAME, - new InfinispanRegionFactory() { - @Override - protected EmbeddedCacheManager createCacheManager() throws CacheException { - if ( manager != null ) { - return manager; - } - else { - return super.createCacheManager(); - } - } - } - ); - properties.put( AvailableSettings.JTA_PLATFORM, JBossStandAloneJtaPlatform.class.getName() ); - org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration(); - cfg.setProperties( properties ); - ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( properties ); - return (SessionFactoryImplementor) cfg.buildSessionFactory( serviceRegistry ); - } + }; + factory.start(null, p); + return factory; + } } diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/NodeEnvironment.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/NodeEnvironment.java index 1a7d9834b1..0ed289ad5e 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/NodeEnvironment.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/NodeEnvironment.java @@ -25,20 +25,15 @@ package org.hibernate.test.cache.infinispan; import java.util.HashMap; import java.util.Map; -import java.util.concurrent.Callable; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; +import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl; import org.hibernate.cache.infinispan.InfinispanRegionFactory; import org.hibernate.cache.infinispan.collection.CollectionRegionImpl; import org.hibernate.cache.infinispan.entity.EntityRegionImpl; -import org.hibernate.cache.infinispan.util.FlagAdapter; import org.hibernate.cache.spi.CacheDataDescription; -import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cfg.Configuration; -import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl; -import org.hibernate.engine.spi.SessionFactoryImplementor; - -import static org.hibernate.cache.infinispan.util.CacheHelper.withinTx; +import org.hibernate.test.cache.infinispan.util.CacheTestUtil; /** * Defines the environment for a node. 
@@ -46,112 +41,113 @@ import static org.hibernate.cache.infinispan.util.CacheHelper.withinTx; * @author Steve Ebersole */ public class NodeEnvironment { - private final Configuration configuration; - private StandardServiceRegistryImpl serviceRegistry; - private InfinispanRegionFactory regionFactory; - private SessionFactoryImplementor sessionFactory; + private final Configuration configuration; + private StandardServiceRegistryImpl serviceRegistry; + private InfinispanRegionFactory regionFactory; - private Map entityRegionMap; - private Map collectionRegionMap; + private Map entityRegionMap; + private Map collectionRegionMap; - public NodeEnvironment(Configuration configuration) { - this.configuration = configuration; - } + public NodeEnvironment(Configuration configuration) { + this.configuration = configuration; + } - public Configuration getConfiguration() { - return configuration; - } + public Configuration getConfiguration() { + return configuration; + } - public StandardServiceRegistryImpl getServiceRegistry() { - return serviceRegistry; - } + public StandardServiceRegistryImpl getServiceRegistry() { + return serviceRegistry; + } - public EntityRegionImpl getEntityRegion(String name, CacheDataDescription cacheDataDescription) { - if ( entityRegionMap == null ) { - entityRegionMap = new HashMap(); - return buildAndStoreEntityRegion( name, cacheDataDescription ); - } - EntityRegionImpl region = entityRegionMap.get( name ); - if ( region == null ) { - region = buildAndStoreEntityRegion( name, cacheDataDescription ); - } - return region; - } + public EntityRegionImpl getEntityRegion(String name, CacheDataDescription cacheDataDescription) { + if (entityRegionMap == null) { + entityRegionMap = new HashMap(); + return buildAndStoreEntityRegion(name, cacheDataDescription); + } + EntityRegionImpl region = entityRegionMap.get(name); + if (region == null) { + region = buildAndStoreEntityRegion(name, cacheDataDescription); + } + return region; + } - private EntityRegionImpl buildAndStoreEntityRegion(String name, CacheDataDescription cacheDataDescription) { - EntityRegionImpl region = (EntityRegionImpl) regionFactory.buildEntityRegion( - name, - configuration.getProperties(), - cacheDataDescription - ); - entityRegionMap.put( name, region ); - return region; - } + private EntityRegionImpl buildAndStoreEntityRegion(String name, CacheDataDescription cacheDataDescription) { + EntityRegionImpl region = (EntityRegionImpl) regionFactory.buildEntityRegion( + name, + configuration.getProperties(), + cacheDataDescription + ); + entityRegionMap.put(name, region); + return region; + } - public CollectionRegionImpl getCollectionRegion(String name, CacheDataDescription cacheDataDescription) { - if ( collectionRegionMap == null ) { - collectionRegionMap = new HashMap(); - return buildAndStoreCollectionRegion( name, cacheDataDescription ); - } - CollectionRegionImpl region = collectionRegionMap.get( name ); - if ( region == null ) { - region = buildAndStoreCollectionRegion( name, cacheDataDescription ); - collectionRegionMap.put( name, region ); - } - return region; - } + public CollectionRegionImpl getCollectionRegion(String name, CacheDataDescription cacheDataDescription) { + if (collectionRegionMap == null) { + collectionRegionMap = new HashMap(); + return buildAndStoreCollectionRegion(name, cacheDataDescription); + } + CollectionRegionImpl region = collectionRegionMap.get(name); + if (region == null) { + region = buildAndStoreCollectionRegion(name, cacheDataDescription); + collectionRegionMap.put(name, 
region); + } + return region; + } - private CollectionRegionImpl buildAndStoreCollectionRegion(String name, CacheDataDescription cacheDataDescription) { - CollectionRegionImpl region; - region = (CollectionRegionImpl) regionFactory.buildCollectionRegion( - name, - configuration.getProperties(), - cacheDataDescription - ); - return region; - } + private CollectionRegionImpl buildAndStoreCollectionRegion(String name, CacheDataDescription cacheDataDescription) { + CollectionRegionImpl region; + region = (CollectionRegionImpl) regionFactory.buildCollectionRegion( + name, + configuration.getProperties(), + cacheDataDescription + ); + return region; + } - public void prepare() throws Exception { - serviceRegistry = (StandardServiceRegistryImpl) new StandardServiceRegistryBuilder() - .applySettings( configuration.getProperties() ) - .build(); - sessionFactory = (SessionFactoryImplementor)configuration.buildSessionFactory( serviceRegistry ); - regionFactory = (InfinispanRegionFactory)sessionFactory.getServiceRegistry().getService( RegionFactory.class ); - } + public void prepare() throws Exception { + serviceRegistry = (StandardServiceRegistryImpl) new StandardServiceRegistryBuilder() + .applySettings(configuration.getProperties()) + .build(); + regionFactory = CacheTestUtil.startRegionFactory(serviceRegistry, configuration); + } - public void release() throws Exception { - if ( entityRegionMap != null ) { - for ( final EntityRegionImpl region : entityRegionMap.values() ) { - withinTx(region.getTransactionManager(), new Callable() { - @Override - public Void call() throws Exception { - region.getCacheAdapter().withFlags(FlagAdapter.CACHE_MODE_LOCAL).clear(); - return null; + public void release() throws Exception { + try { + if (entityRegionMap != null) { + for (EntityRegionImpl region : entityRegionMap.values()) { + try { + region.getCache().stop(); + } catch (Exception e) { + // Ignore... } - }); - region.getCacheAdapter().stop(); - } - entityRegionMap.clear(); - } - if ( collectionRegionMap != null ) { - for ( final CollectionRegionImpl collectionRegion : collectionRegionMap.values() ) { - withinTx(collectionRegion.getTransactionManager(), new Callable() { - @Override - public Void call() throws Exception { - collectionRegion.getCacheAdapter().withFlags( FlagAdapter.CACHE_MODE_LOCAL ).clear(); - return null; + } + entityRegionMap.clear(); + } + if (collectionRegionMap != null) { + for (CollectionRegionImpl reg : collectionRegionMap.values()) { + try { + reg.getCache().stop(); + } catch (Exception e) { + // Ignore... 
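The reworked `NodeEnvironment` above is driven by the access-strategy test cases later in this patch: construct it with a `Configuration`, call `prepare()`, fetch regions, then `release()`. A call-sequence sketch, with imports and the usual JUnit scaffolding omitted and the region name chosen only for illustration:

    @Test
    public void exampleNodeEnvironmentLifecycle() throws Exception {
       Configuration cfg = CacheTestUtil.buildConfiguration(
             "test", InfinispanRegionFactory.class, true, false);
       NodeEnvironment environment = new NodeEnvironment(cfg);
       environment.prepare();   // builds the service registry and starts the region factory
       try {
          EntityRegionImpl region = environment.getEntityRegion(
                "com.acme.Person",
                new CacheDataDescriptionImpl(true, true, ComparableComparator.INSTANCE));
          assertNotNull(region.buildAccessStrategy(AccessType.TRANSACTIONAL));
       } finally {
          environment.release();   // stops each region cache, then the factory and the registry
       }
    }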
} - }); - collectionRegion.getCacheAdapter().stop(); - } - collectionRegionMap.clear(); - } - if ( sessionFactory != null ) { - sessionFactory.close(); - } - if ( serviceRegistry != null ) { - serviceRegistry.destroy(); - } - } + + } + collectionRegionMap.clear(); + } + } finally { + try { + if (regionFactory != null) { + // Currently the RegionFactory is shutdown by its registration + // with the CacheTestSetup from CacheTestUtil when built + regionFactory.stop(); + } + } finally { + if (serviceRegistry != null) { + serviceRegistry.destroy(); + } + } + } + } } diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/access/PutFromLoadValidatorUnitTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/access/PutFromLoadValidatorUnitTestCase.java index ccc83ab0ff..eac2c24318 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/access/PutFromLoadValidatorUnitTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/access/PutFromLoadValidatorUnitTestCase.java @@ -32,17 +32,21 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import javax.transaction.Transaction; import javax.transaction.TransactionManager; +import org.infinispan.manager.EmbeddedCacheManager; +import org.infinispan.test.CacheManagerCallable; +import org.infinispan.test.fwk.TestCacheManagerFactory; +import org.infinispan.util.logging.Log; +import org.infinispan.util.logging.LogFactory; import org.junit.After; import org.junit.Before; -import org.junit.Ignore; import org.junit.Test; import org.hibernate.cache.infinispan.access.PutFromLoadValidator; import org.hibernate.test.cache.infinispan.functional.cluster.DualNodeJtaTransactionManagerImpl; +import static org.infinispan.test.TestingUtil.withCacheManager; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; @@ -56,8 +60,11 @@ import static org.junit.Assert.fail; * @author Galder Zamarreño * @version $Revision: $ */ -@Ignore public class PutFromLoadValidatorUnitTestCase { + + private static final Log log = LogFactory.getLog( + PutFromLoadValidatorUnitTestCase.class); + private Object KEY1 = "KEY1"; private TransactionManager tm; @@ -67,16 +74,17 @@ public class PutFromLoadValidatorUnitTestCase { tm = DualNodeJtaTransactionManagerImpl.getInstance("test"); } - @After - public void tearDown() throws Exception { - tm = null; - try { - DualNodeJtaTransactionManagerImpl.cleanupTransactions(); - } - finally { - DualNodeJtaTransactionManagerImpl.cleanupTransactionManagers(); - } + @After + public void tearDown() throws Exception { + tm = null; + try { + DualNodeJtaTransactionManagerImpl.cleanupTransactions(); + } + finally { + DualNodeJtaTransactionManagerImpl.cleanupTransactionManagers(); + } } + @Test public void testNakedPut() throws Exception { nakedPutTest(false); @@ -86,20 +94,32 @@ public class PutFromLoadValidatorUnitTestCase { nakedPutTest(true); } - private void nakedPutTest(boolean transactional) throws Exception { - PutFromLoadValidator testee = new PutFromLoadValidator(transactional ? 
tm : null); - if (transactional) { - tm.begin(); - } - boolean lockable = testee.acquirePutFromLoadLock(KEY1); - try { - assertTrue(lockable); - } - finally { - if (lockable) { - testee.releasePutFromLoadLock(KEY1); + private void nakedPutTest(final boolean transactional) throws Exception { + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + try { + PutFromLoadValidator testee = new PutFromLoadValidator(cm, + transactional ? tm : null, + PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD); + if (transactional) { + tm.begin(); + } + boolean lockable = testee.acquirePutFromLoadLock(KEY1); + try { + assertTrue(lockable); + } + finally { + if (lockable) { + testee.releasePutFromLoadLock(KEY1); + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } } - } + }); } @Test public void testRegisteredPut() throws Exception { @@ -110,23 +130,34 @@ public class PutFromLoadValidatorUnitTestCase { registeredPutTest(true); } - private void registeredPutTest(boolean transactional) throws Exception { - PutFromLoadValidator testee = new PutFromLoadValidator( - transactional ? tm : null); - if (transactional) { - tm.begin(); - } - testee.registerPendingPut(KEY1); + private void registeredPutTest(final boolean transactional) throws Exception { + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + PutFromLoadValidator testee = new PutFromLoadValidator(cm, + transactional ? tm : null, + PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD); + try { + if (transactional) { + tm.begin(); + } + testee.registerPendingPut(KEY1); - boolean lockable = testee.acquirePutFromLoadLock(KEY1); - try { - assertTrue(lockable); - } - finally { - if (lockable) { - testee.releasePutFromLoadLock(KEY1); + boolean lockable = testee.acquirePutFromLoadLock(KEY1); + try { + assertTrue(lockable); + } + finally { + if (lockable) { + testee.releasePutFromLoadLock(KEY1); + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } } - } + }); } @Test public void testNakedPutAfterKeyRemoval() throws Exception { @@ -145,28 +176,40 @@ public class PutFromLoadValidatorUnitTestCase { nakedPutAfterRemovalTest(true, true); } - private void nakedPutAfterRemovalTest(boolean transactional, boolean removeRegion) - throws Exception { - PutFromLoadValidator testee = new PutFromLoadValidator( - transactional ? tm : null); - if (removeRegion) { - testee.invalidateRegion(); - } else { - testee.invalidateKey(KEY1); - } - if (transactional) { - tm.begin(); - } + private void nakedPutAfterRemovalTest(final boolean transactional, + final boolean removeRegion) throws Exception { + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + PutFromLoadValidator testee = new PutFromLoadValidator(cm, + transactional ? 
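The rewritten tests above all run inside Infinispan's `withCacheManager` helper (from the test-support classes the new imports pull in), so the temporary cache manager is torn down even when the test body throws. Stripped of the validator specifics, the idiom is roughly the following; the put on the default cache is only there to show that `cm` is usable inside `call()`.

    import org.infinispan.manager.EmbeddedCacheManager;
    import org.infinispan.test.CacheManagerCallable;
    import org.infinispan.test.fwk.TestCacheManagerFactory;

    import static org.infinispan.test.TestingUtil.withCacheManager;

    public class WithCacheManagerSketch {
       public static void main(String[] args) {
          withCacheManager(new CacheManagerCallable(
                TestCacheManagerFactory.createLocalCacheManager(false)) {
             @Override
             public void call() {
                // 'cm' is the manager held by CacheManagerCallable; it is
                // stopped automatically once call() returns or throws
                EmbeddedCacheManager manager = cm;
                manager.getCache().put("k", "v");
             }
          });
       }
    }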
tm : null, + PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD); + if (removeRegion) { + testee.invalidateRegion(); + } else { + testee.invalidateKey(KEY1); + } + try { + if (transactional) { + tm.begin(); + } - boolean lockable = testee.acquirePutFromLoadLock(KEY1); - try { - assertFalse(lockable); - } - finally { - if (lockable) { - testee.releasePutFromLoadLock(KEY1); + boolean lockable = testee.acquirePutFromLoadLock(KEY1); + try { + assertFalse(lockable); + } + finally { + if (lockable) { + testee.releasePutFromLoadLock(KEY1); + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } } - } + }); + } @Test public void testRegisteredPutAfterKeyRemoval() throws Exception { @@ -185,29 +228,41 @@ public class PutFromLoadValidatorUnitTestCase { registeredPutAfterRemovalTest(true, true); } - private void registeredPutAfterRemovalTest(boolean transactional, boolean removeRegion) - throws Exception { - PutFromLoadValidator testee = new PutFromLoadValidator( - transactional ? tm : null); - if (removeRegion) { - testee.invalidateRegion(); - } else { - testee.invalidateKey(KEY1); - } - if (transactional) { - tm.begin(); - } - testee.registerPendingPut(KEY1); + private void registeredPutAfterRemovalTest(final boolean transactional, + final boolean removeRegion) throws Exception { + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + PutFromLoadValidator testee = new PutFromLoadValidator(cm, + transactional ? tm : null, + PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD); + if (removeRegion) { + testee.invalidateRegion(); + } else { + testee.invalidateKey(KEY1); + } + try { + if (transactional) { + tm.begin(); + } + testee.registerPendingPut(KEY1); - boolean lockable = testee.acquirePutFromLoadLock(KEY1); - try { - assertTrue(lockable); - } - finally { - if (lockable) { - testee.releasePutFromLoadLock(KEY1); + boolean lockable = testee.acquirePutFromLoadLock(KEY1); + try { + assertTrue(lockable); + } + finally { + if (lockable) { + testee.releasePutFromLoadLock(KEY1); + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } } - } + }); + } @Test public void testRegisteredPutWithInterveningKeyRemoval() throws Exception { @@ -226,29 +281,41 @@ public class PutFromLoadValidatorUnitTestCase { registeredPutWithInterveningRemovalTest(true, true); } - private void registeredPutWithInterveningRemovalTest(boolean transactional, boolean removeRegion) + private void registeredPutWithInterveningRemovalTest( + final boolean transactional, final boolean removeRegion) throws Exception { - PutFromLoadValidator testee = new PutFromLoadValidator( - transactional ? tm : null); - if (transactional) { - tm.begin(); - } - testee.registerPendingPut(KEY1); - if (removeRegion) { - testee.invalidateRegion(); - } else { - testee.invalidateKey(KEY1); - } + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + PutFromLoadValidator testee = new PutFromLoadValidator(cm, + transactional ? 
tm : null, + PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD); + try { + if (transactional) { + tm.begin(); + } + testee.registerPendingPut(KEY1); + if (removeRegion) { + testee.invalidateRegion(); + } else { + testee.invalidateKey(KEY1); + } - boolean lockable = testee.acquirePutFromLoadLock(KEY1); - try { - assertFalse(lockable); - } - finally { - if (lockable) { - testee.releasePutFromLoadLock(KEY1); + boolean lockable = testee.acquirePutFromLoadLock(KEY1); + try { + assertFalse(lockable); + } + finally { + if (lockable) { + testee.releasePutFromLoadLock(KEY1); + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } } - } + }); } @Test public void testDelayedNakedPutAfterKeyRemoval() throws Exception { @@ -267,85 +334,114 @@ public class PutFromLoadValidatorUnitTestCase { delayedNakedPutAfterRemovalTest(true, true); } - private void delayedNakedPutAfterRemovalTest(boolean transactional, boolean removeRegion) + private void delayedNakedPutAfterRemovalTest( + final boolean transactional, final boolean removeRegion) throws Exception { - PutFromLoadValidator testee = new TestValidator(transactional ? tm : null, 100, 1000, 500, 10000); - if (removeRegion) { - testee.invalidateRegion(); - } else { - testee.invalidateKey(KEY1); - } - if (transactional) { - tm.begin(); - } - Thread.sleep(110); + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + PutFromLoadValidator testee = new TestValidator(cm, + transactional ? tm : null, 100); + if (removeRegion) { + testee.invalidateRegion(); + } else { + testee.invalidateKey(KEY1); + } + try { + if (transactional) { + tm.begin(); + } + Thread.sleep(110); - boolean lockable = testee.acquirePutFromLoadLock(KEY1); - try { - assertTrue(lockable); - } - finally { - if (lockable) { - testee.releasePutFromLoadLock(KEY1); + boolean lockable = testee.acquirePutFromLoadLock(KEY1); + try { + assertTrue(lockable); + } + finally { + if (lockable) { + testee.releasePutFromLoadLock(KEY1); + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } } - } + }); } + @Test public void testMultipleRegistrations() throws Exception { multipleRegistrationtest(false); } + @Test public void testMultipleRegistrationsTransactional() throws Exception { multipleRegistrationtest(true); } private void multipleRegistrationtest(final boolean transactional) throws Exception { - final PutFromLoadValidator testee = new PutFromLoadValidator(transactional ? tm : null); + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + final PutFromLoadValidator testee = new PutFromLoadValidator(cm, + transactional ? 
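Taken together, the naked-put, registered-put, removal and intervening-removal variants above pin down the validator's contract: the put-from-load lock is granted when the put was registered and no invalidation has happened since, and refused when an invalidation arrived after the registration (or, for naked puts, within the invalidation period). A condensed restatement, assuming it runs inside a `withCacheManager` body as in the tests so that `cm` is in scope; the key is illustrative:

    PutFromLoadValidator validator = new PutFromLoadValidator(
          cm, null, PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD);

    // Registered put with no intervening invalidation: lock is granted
    validator.registerPendingPut("key");
    boolean allowed = validator.acquirePutFromLoadLock("key");   // true
    if (allowed) {
       validator.releasePutFromLoadLock("key");
    }

    // Registration followed by an invalidation of the key: lock is refused
    validator.registerPendingPut("key");
    validator.invalidateKey("key");
    boolean blocked = !validator.acquirePutFromLoadLock("key");  // true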
tm : null, + PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD); - final CountDownLatch registeredLatch = new CountDownLatch(3); - final CountDownLatch finishedLatch = new CountDownLatch(3); - final AtomicInteger success = new AtomicInteger(); + final CountDownLatch registeredLatch = new CountDownLatch(3); + final CountDownLatch finishedLatch = new CountDownLatch(3); + final AtomicInteger success = new AtomicInteger(); - Runnable r = new Runnable() { - public void run() { - try { - if (transactional) { - tm.begin(); - } - testee.registerPendingPut(KEY1); - registeredLatch.countDown(); - registeredLatch.await(5, TimeUnit.SECONDS); - if (testee.acquirePutFromLoadLock(KEY1)) { + Runnable r = new Runnable() { + public void run() { try { - success.incrementAndGet(); + if (transactional) { + tm.begin(); + } + testee.registerPendingPut(KEY1); + registeredLatch.countDown(); + registeredLatch.await(5, TimeUnit.SECONDS); + if (testee.acquirePutFromLoadLock(KEY1)) { + try { + log.trace("Put from load lock acquired for key = " + KEY1); + success.incrementAndGet(); + } + finally { + testee.releasePutFromLoadLock(KEY1); + } + } else { + log.trace("Unable to acquired putFromLoad lock for key = " + KEY1); + } + finishedLatch.countDown(); } - finally { - testee.releasePutFromLoadLock(KEY1); + catch (Exception e) { + e.printStackTrace(); } } - finishedLatch.countDown(); - } - catch (Exception e) { - e.printStackTrace(); + }; + + ExecutorService executor = Executors.newFixedThreadPool(3); + + // Start with a removal so the "isPutValid" calls will fail if + // any of the concurrent activity isn't handled properly + + testee.invalidateRegion(); + + // Do the registration + isPutValid calls + executor.execute(r); + executor.execute(r); + executor.execute(r); + + try { + finishedLatch.await(5, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(e); } + + assertEquals("All threads succeeded", 3, success.get()); } - }; - - ExecutorService executor = Executors.newFixedThreadPool(3); - - // Start with a removal so the "isPutValid" calls will fail if - // any of the concurrent activity isn't handled properly - - testee.invalidateRegion(); - - // Do the registration + isPutValid calls - executor.execute(r); - executor.execute(r); - executor.execute(r); - - finishedLatch.await(5, TimeUnit.SECONDS); - - assertEquals("All threads succeeded", 3, success.get()); + }); } /** @@ -356,17 +452,23 @@ public class PutFromLoadValidatorUnitTestCase { */ @Test public void testRemovalCleanup() throws Exception { - TestValidator testee = new TestValidator(null, 200, 1000, 500, 10000); - testee.invalidateKey("KEY1"); - testee.invalidateKey("KEY2"); - expectRemovalLenth(2, testee, 3000l); - assertEquals(2, testee.getRemovalQueueLength()); - expectRemovalLenth(2, testee, 3000l); - assertEquals(2, testee.getRemovalQueueLength()); - expectRemovalLenth( 2, testee, 3000l ); + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + TestValidator testee = new TestValidator(cm, null, 200); + testee.invalidateKey("KEY1"); + testee.invalidateKey("KEY2"); + expectRemovalLenth(2, testee, 60000l); + assertEquals(2, testee.getRemovalQueueLength()); + expectRemovalLenth(2, testee, 60000l); + assertEquals(2, testee.getRemovalQueueLength()); + expectRemovalLenth(2, testee, 60000l); + } + }); } - private void expectRemovalLenth(int expectedLength, TestValidator testee, long timeout) throws InterruptedException { + private void 
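The multiple-registration test above coordinates three racing threads with a pair of `CountDownLatch`es and a fixed thread pool. Isolated from the validator, the coordination pattern is roughly the following (all names are illustrative):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicInteger;

    public class LatchRaceSketch {
       public static void main(String[] args) throws InterruptedException {
          final int workers = 3;
          final CountDownLatch ready = new CountDownLatch(workers);   // everyone has registered
          final CountDownLatch done = new CountDownLatch(workers);    // everyone has finished
          final AtomicInteger successes = new AtomicInteger();

          Runnable worker = new Runnable() {
             public void run() {
                try {
                   ready.countDown();
                   ready.await(5, TimeUnit.SECONDS);    // enter the racy section together
                   successes.incrementAndGet();         // stand-in for the acquire/release work
                } catch (InterruptedException e) {
                   Thread.currentThread().interrupt();
                } finally {
                   done.countDown();
                }
             }
          };

          ExecutorService executor = Executors.newFixedThreadPool(workers);
          for (int i = 0; i < workers; i++) {
             executor.execute(worker);
          }
          done.await(5, TimeUnit.SECONDS);
          executor.shutdown();
          System.out.println("successes: " + successes.get());
       }
    }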
expectRemovalLenth(int expectedLength, TestValidator testee, long timeout) { long timeoutMilestone = System.currentTimeMillis() + timeout; while ( true ) { int queueLength = testee.getRemovalQueueLength(); @@ -378,215 +480,101 @@ public class PutFromLoadValidatorUnitTestCase { if ( System.currentTimeMillis() > timeoutMilestone ) { fail( "condition not reached after " + timeout + " milliseconds. giving up!" ); } - Thread.sleep(20); + try { + Thread.sleep(20); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } } } } - /** - * Very much a white box test of the logic for ensuring pending put registrations get cleaned up. - * - * @throws Exception - */ - @Test - public void testPendingPutCleanup() throws Exception { - TestValidator testee = new TestValidator(tm, 5000, 600, 300, 900); - - // Start with a regionRemoval so we can confirm at the end that all - // registrations have been cleaned out - testee.invalidateRegion(); - - testee.registerPendingPut("1"); - testee.registerPendingPut("2"); - testee.registerPendingPut("3"); - testee.registerPendingPut("4"); - testee.registerPendingPut("5"); - testee.registerPendingPut("6"); - testee.acquirePutFromLoadLock("6"); - testee.releasePutFromLoadLock("6"); - testee.acquirePutFromLoadLock("2"); - testee.releasePutFromLoadLock("2"); - // ppq = [1,2(c),3,4,5,6(c)] - assertEquals(6, testee.getPendingPutQueueLength()); - assertEquals(0, testee.getOveragePendingPutQueueLength()); - - // Sleep past "pendingPutRecentPeriod" - Thread.sleep(310); - testee.registerPendingPut("7"); - // White box -- should have cleaned out 2 (completed) but - // not gotten to 6 (also removed) - // ppq = [1,3,4,5,6(c),7] - assertEquals(0, testee.getOveragePendingPutQueueLength()); - assertEquals(6, testee.getPendingPutQueueLength()); - - // Sleep past "pendingPutOveragePeriod" - Thread.sleep(310); - testee.registerPendingPut("8"); - // White box -- should have cleaned out 6 (completed) and - // moved 1, 3, 4 and 5 to overage queue - // oppq = [1,3,4,5] ppq = [7,8] - assertEquals(4, testee.getOveragePendingPutQueueLength()); - assertEquals(2, testee.getPendingPutQueueLength()); - - // Sleep past "maxPendingPutDelay" - Thread.sleep(310); - testee.acquirePutFromLoadLock("3"); - testee.releasePutFromLoadLock("3"); - // White box -- should have cleaned out 1 (overage) and - // moved 7 to overage queue - // oppq = [3(c),4,5,7] ppq=[8] - assertEquals(4, testee.getOveragePendingPutQueueLength()); - assertEquals(1, testee.getPendingPutQueueLength()); - - // Sleep past "maxPendingPutDelay" - Thread.sleep(310); - tm.begin(); - testee.registerPendingPut("7"); - Transaction tx = tm.suspend(); - - // White box -- should have cleaned out 3 (completed) - // and 4 (overage) and moved 8 to overage queue - // We now have 5,7,8 in overage and 7tx in pending - // oppq = [5,7,8] ppq=[7tx] - assertEquals(3, testee.getOveragePendingPutQueueLength()); - assertEquals(1, testee.getPendingPutQueueLength()); - - // Validate that only expected items can do puts, thus indirectly - // proving the others have been cleaned out of pendingPuts map - boolean locked = testee.acquirePutFromLoadLock("1"); - if (locked) { - testee.releasePutFromLoadLock("1"); - } - assertFalse(locked); - // 5 was overage, so should have been cleaned - assertEquals(2, testee.getOveragePendingPutQueueLength()); - locked = testee.acquirePutFromLoadLock("2"); - if (locked) { - testee.releasePutFromLoadLock("1"); - } - assertFalse(locked); - // 7 was overage, so should have been cleaned - assertEquals(1, 
testee.getOveragePendingPutQueueLength()); - locked = testee.acquirePutFromLoadLock("3"); - if (locked) { - testee.releasePutFromLoadLock("1"); - } - assertFalse(locked); - locked = testee.acquirePutFromLoadLock("4"); - if (locked) { - testee.releasePutFromLoadLock("1"); - } - assertFalse(locked); - locked = testee.acquirePutFromLoadLock("5"); - if (locked) { - testee.releasePutFromLoadLock("1"); - } - assertFalse(locked); - locked = testee.acquirePutFromLoadLock("1"); - if (locked) { - testee.releasePutFromLoadLock("1"); - } - assertFalse(testee.acquirePutFromLoadLock("6")); - locked = testee.acquirePutFromLoadLock("7"); - if (locked) { - testee.releasePutFromLoadLock("1"); - } - assertFalse(locked); - assertTrue(testee.acquirePutFromLoadLock("8")); - testee.releasePutFromLoadLock("8"); - tm.resume(tx); - assertTrue(testee.acquirePutFromLoadLock("7")); - testee.releasePutFromLoadLock("7"); - } @Test public void testInvalidateKeyBlocksForInProgressPut() throws Exception { invalidationBlocksForInProgressPutTest(true); } + @Test public void testInvalidateRegionBlocksForInProgressPut() throws Exception { invalidationBlocksForInProgressPutTest(false); } private void invalidationBlocksForInProgressPutTest(final boolean keyOnly) throws Exception { - final PutFromLoadValidator testee = new PutFromLoadValidator(null); - final CountDownLatch removeLatch = new CountDownLatch(1); - final CountDownLatch pferLatch = new CountDownLatch(1); - final AtomicReference cache = new AtomicReference("INITIAL"); + withCacheManager(new CacheManagerCallable( + TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + final PutFromLoadValidator testee = new PutFromLoadValidator( + cm, null, PutFromLoadValidator.NAKED_PUT_INVALIDATION_PERIOD); + final CountDownLatch removeLatch = new CountDownLatch(1); + final CountDownLatch pferLatch = new CountDownLatch(1); + final AtomicReference cache = new AtomicReference("INITIAL"); - Callable pferCallable = new Callable() { - public Boolean call() throws Exception { - testee.registerPendingPut(KEY1); - if (testee.acquirePutFromLoadLock(KEY1)) { + Callable pferCallable = new Callable() { + public Boolean call() throws Exception { + testee.registerPendingPut(KEY1); + if (testee.acquirePutFromLoadLock(KEY1)) { + try { + removeLatch.countDown(); + pferLatch.await(); + cache.set("PFER"); + return Boolean.TRUE; + } + finally { + testee.releasePutFromLoadLock(KEY1); + } + } + return Boolean.FALSE; + } + }; + + Callable invalidateCallable = new Callable() { + public Void call() throws Exception { + removeLatch.await(); + if (keyOnly) { + testee.invalidateKey(KEY1); + } else { + testee.invalidateRegion(); + } + cache.set(null); + return null; + } + }; + + ExecutorService executorService = Executors.newCachedThreadPool(); + Future pferFuture = executorService.submit(pferCallable); + Future invalidateFuture = executorService.submit(invalidateCallable); + + try { try { - removeLatch.countDown(); - pferLatch.await(); - cache.set("PFER"); - return Boolean.TRUE; - } - finally { - testee.releasePutFromLoadLock(KEY1); + invalidateFuture.get(1, TimeUnit.SECONDS); + fail("invalidateFuture did not block"); } + catch (TimeoutException good) {} + + pferLatch.countDown(); + + assertTrue(pferFuture.get(5, TimeUnit.SECONDS)); + invalidateFuture.get(5, TimeUnit.SECONDS); + + assertNull(cache.get()); + } catch (Exception e) { + throw new RuntimeException(e); } - return Boolean.FALSE; } - }; - - Callable invalidateCallable = new Callable() { - public Void call() 
throws Exception { - removeLatch.await(); - if (keyOnly) { - testee.invalidateKey(KEY1); - } else { - testee.invalidateRegion(); - } - cache.set(null); - return null; - } - }; - - ExecutorService executorService = Executors.newCachedThreadPool(); - Future pferFuture = executorService.submit(pferCallable); - Future invalidateFuture = executorService.submit(invalidateCallable); - - try { - invalidateFuture.get(1, TimeUnit.SECONDS); - fail("invalidateFuture did not block"); - } - catch (TimeoutException good) {} - - pferLatch.countDown(); - - assertTrue(pferFuture.get(5, TimeUnit.SECONDS)); - invalidateFuture.get(5, TimeUnit.SECONDS); - - assertNull(cache.get()); - + }); } private static class TestValidator extends PutFromLoadValidator { - protected TestValidator(TransactionManager transactionManager, - long nakedPutInvalidationPeriod, long pendingPutOveragePeriod, - long pendingPutRecentPeriod, long maxPendingPutDelay) { - super(transactionManager, nakedPutInvalidationPeriod, pendingPutOveragePeriod, - pendingPutRecentPeriod, maxPendingPutDelay); - } - - @Override - public int getOveragePendingPutQueueLength() { - // TODO Auto-generated method stub - return super.getOveragePendingPutQueueLength(); - } - - @Override - public int getPendingPutQueueLength() { - // TODO Auto-generated method stub - return super.getPendingPutQueueLength(); + protected TestValidator(EmbeddedCacheManager cm, + TransactionManager transactionManager, + long nakedPutInvalidationPeriod) { + super(cm, transactionManager, nakedPutInvalidationPeriod); } @Override public int getRemovalQueueLength() { - // TODO Auto-generated method stub return super.getRemovalQueueLength(); } diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/AbstractCollectionRegionAccessStrategyTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/AbstractCollectionRegionAccessStrategyTestCase.java index 0cb00ede98..23793fa837 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/AbstractCollectionRegionAccessStrategyTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/AbstractCollectionRegionAccessStrategyTestCase.java @@ -32,6 +32,9 @@ import java.util.concurrent.TimeUnit; import javax.transaction.TransactionManager; import junit.framework.AssertionFailedError; +import org.hibernate.cache.infinispan.util.Caches; +import org.infinispan.test.CacheManagerCallable; +import org.infinispan.test.fwk.TestCacheManagerFactory; import org.infinispan.transaction.tm.BatchModeTransactionManager; import org.jboss.logging.Logger; import org.junit.After; @@ -42,7 +45,6 @@ import org.hibernate.cache.infinispan.InfinispanRegionFactory; import org.hibernate.cache.infinispan.access.PutFromLoadValidator; import org.hibernate.cache.infinispan.access.TransactionalAccessDelegate; import org.hibernate.cache.infinispan.collection.CollectionRegionImpl; -import org.hibernate.cache.infinispan.util.CacheHelper; import org.hibernate.cache.internal.CacheDataDescriptionImpl; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.access.AccessType; @@ -53,6 +55,7 @@ import org.hibernate.test.cache.infinispan.AbstractNonFunctionalTestCase; import org.hibernate.test.cache.infinispan.NodeEnvironment; import org.hibernate.test.cache.infinispan.util.CacheTestUtil; +import static org.infinispan.test.TestingUtil.withCacheManager; import static org.junit.Assert.assertEquals; import static 
org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; @@ -103,8 +106,8 @@ public abstract class AbstractCollectionRegionAccessStrategyTestCase extends Abs localCollectionRegion = localEnvironment.getCollectionRegion( REGION_NAME, getCacheDataDescription() ); localAccessStrategy = localCollectionRegion.buildAccessStrategy( getAccessType() ); - invalidation = localCollectionRegion.getCacheAdapter().isClusteredInvalidation(); - synchronous = localCollectionRegion.getCacheAdapter().isSynchronous(); + invalidation = Caches.isInvalidationCache(localCollectionRegion.getCache()); + synchronous = Caches.isSynchronousCache(localCollectionRegion.getCache()); // Sleep a bit to avoid concurrent FLUSH problem avoidConcurrentFlush(); @@ -161,60 +164,70 @@ public abstract class AbstractCollectionRegionAccessStrategyTestCase extends Abs final CountDownLatch pferLatch = new CountDownLatch( 1 ); final CountDownLatch removeLatch = new CountDownLatch( 1 ); final TransactionManager remoteTm = remoteCollectionRegion.getTransactionManager(); - PutFromLoadValidator validator = new PutFromLoadValidator(remoteTm) { - @Override - public boolean acquirePutFromLoadLock(Object key) { - boolean acquired = super.acquirePutFromLoadLock( key ); - try { - removeLatch.countDown(); - pferLatch.await( 2, TimeUnit.SECONDS ); - } - catch (InterruptedException e) { - log.debug( "Interrupted" ); - Thread.currentThread().interrupt(); - } - catch (Exception e) { - log.error( "Error", e ); - throw new RuntimeException( "Error", e ); - } - return acquired; - } - }; - - final TransactionalAccessDelegate delegate = - new TransactionalAccessDelegate(localCollectionRegion, validator); - final TransactionManager localTm = localCollectionRegion.getTransactionManager(); - - Callable pferCallable = new Callable() { - public Void call() throws Exception { - delegate.putFromLoad( "k1", "v1", 0, null ); - return null; - } - }; - - Callable removeCallable = new Callable() { - public Void call() throws Exception { - removeLatch.await(); - CacheHelper.withinTx(localTm, new Callable() { + withCacheManager(new CacheManagerCallable(TestCacheManagerFactory.createLocalCacheManager(false)) { + @Override + public void call() { + PutFromLoadValidator validator = new PutFromLoadValidator(cm, + remoteTm, 20000) { @Override + public boolean acquirePutFromLoadLock(Object key) { + boolean acquired = super.acquirePutFromLoadLock( key ); + try { + removeLatch.countDown(); + pferLatch.await( 2, TimeUnit.SECONDS ); + } + catch (InterruptedException e) { + log.debug( "Interrupted" ); + Thread.currentThread().interrupt(); + } + catch (Exception e) { + log.error( "Error", e ); + throw new RuntimeException( "Error", e ); + } + return acquired; + } + }; + + final TransactionalAccessDelegate delegate = + new TransactionalAccessDelegate(localCollectionRegion, validator); + final TransactionManager localTm = localCollectionRegion.getTransactionManager(); + + Callable pferCallable = new Callable() { public Void call() throws Exception { - delegate.remove("k1"); + delegate.putFromLoad( "k1", "v1", 0, null ); return null; } - }); - pferLatch.countDown(); - return null; - } - }; + }; - ExecutorService executorService = Executors.newCachedThreadPool(); - Future pferFuture = executorService.submit( pferCallable ); - Future removeFuture = executorService.submit( removeCallable ); + Callable removeCallable = new Callable() { + public Void call() throws Exception { + removeLatch.await(); + Caches.withinTx(localTm, new Callable() { + @Override + public Void call() 
throws Exception { + delegate.remove("k1"); + return null; + } + }); + pferLatch.countDown(); + return null; + } + }; - pferFuture.get(); - removeFuture.get(); + ExecutorService executorService = Executors.newCachedThreadPool(); + Future pferFuture = executorService.submit( pferCallable ); + Future removeFuture = executorService.submit( removeCallable ); - assertFalse( localCollectionRegion.getCacheAdapter().containsKey( "k1" ) ); + try { + pferFuture.get(); + removeFuture.get(); + } catch (Exception e) { + throw new RuntimeException(e); + } + + assertFalse(localCollectionRegion.getCache().containsKey("k1")); + } + }); } @Test @@ -394,7 +407,7 @@ public abstract class AbstractCollectionRegionAccessStrategyTestCase extends Abs // Wait for async propagation sleep( 250 ); - CacheHelper.withinTx(localCollectionRegion.getTransactionManager(), new Callable() { + Caches.withinTx(localCollectionRegion.getTransactionManager(), new Callable() { @Override public Void call() throws Exception { if (evict) @@ -414,9 +427,9 @@ public abstract class AbstractCollectionRegionAccessStrategyTestCase extends Abs final String KEY = KEY_BASE + testCount++; - assertEquals( 0, getValidKeyCount( localCollectionRegion.getCacheAdapter().keySet() ) ); + assertEquals( 0, getValidKeyCount( localCollectionRegion.getCache().keySet() ) ); - assertEquals( 0, getValidKeyCount( remoteCollectionRegion.getCacheAdapter().keySet() ) ); + assertEquals( 0, getValidKeyCount( remoteCollectionRegion.getCache().keySet() ) ); assertNull( "local is clean", localAccessStrategy.get( KEY, System.currentTimeMillis() ) ); assertNull( "remote is clean", remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); @@ -429,7 +442,7 @@ public abstract class AbstractCollectionRegionAccessStrategyTestCase extends Abs // Wait for async propagation sleep( 250 ); - CacheHelper.withinTx(localCollectionRegion.getTransactionManager(), new Callable() { + Caches.withinTx(localCollectionRegion.getTransactionManager(), new Callable() { @Override public Void call() throws Exception { if (evict) @@ -443,19 +456,19 @@ public abstract class AbstractCollectionRegionAccessStrategyTestCase extends Abs // This should re-establish the region root node assertNull( localAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertEquals( 0, getValidKeyCount( localCollectionRegion.getCacheAdapter().keySet() ) ); + assertEquals( 0, getValidKeyCount( localCollectionRegion.getCache().keySet() ) ); // Re-establishing the region root on the local node doesn't // propagate it to other nodes. 
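In the hunks above, `CacheHelper.withinTx` is replaced with the new `Caches.withinTx` helper, which by all appearances runs a `Callable` inside a transaction demarcated on the supplied `TransactionManager`. A fragment showing the call shape, where `tm` and `cache` stand in for the region's transaction manager and cache, and the enclosing method handles the checked exception as the test code does by declaring `throws Exception`:

    Caches.withinTx(tm, new Callable<Void>() {
       @Override
       public Void call() throws Exception {
          // work done here is expected to execute inside a transaction
          // begun and committed by the helper on 'tm'
          cache.remove("k1");
          return null;
       }
    });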
Do a get on the remote node to re-establish assertEquals( null, remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertEquals( 0, getValidKeyCount( remoteCollectionRegion.getCacheAdapter().keySet() ) ); + assertEquals( 0, getValidKeyCount( remoteCollectionRegion.getCache().keySet() ) ); // Test whether the get above messes up the optimistic version remoteAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); assertEquals( VALUE1, remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertEquals( 1, getValidKeyCount( remoteCollectionRegion.getCacheAdapter().keySet() ) ); + assertEquals( 1, getValidKeyCount( remoteCollectionRegion.getCache().keySet() ) ); // Wait for async propagation of the putFromLoad sleep( 250 ); diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/CollectionRegionImplTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/CollectionRegionImplTestCase.java index 8e75ab8b9a..6ae961b4f0 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/CollectionRegionImplTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/collection/CollectionRegionImplTestCase.java @@ -27,8 +27,6 @@ import java.util.Properties; import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.CacheAdapterImpl; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.CollectionRegion; import org.hibernate.cache.spi.Region; @@ -36,6 +34,7 @@ import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cache.spi.access.CollectionRegionAccessStrategy; import org.hibernate.test.cache.infinispan.AbstractEntityCollectionRegionTestCase; +import org.infinispan.AdvancedCache; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; @@ -70,8 +69,8 @@ public class CollectionRegionImplTestCase extends AbstractEntityCollectionRegion } @Override - protected CacheAdapter getInfinispanCache(InfinispanRegionFactory regionFactory) { - return CacheAdapterImpl.newInstance(regionFactory.getCacheManager().getCache(InfinispanRegionFactory.DEF_ENTITY_RESOURCE).getAdvancedCache()); + protected AdvancedCache getInfinispanCache(InfinispanRegionFactory regionFactory) { + return regionFactory.getCacheManager().getCache(InfinispanRegionFactory.DEF_ENTITY_RESOURCE).getAdvancedCache(); } @Override diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/AbstractEntityRegionAccessStrategyTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/AbstractEntityRegionAccessStrategyTestCase.java index 1ea00949e5..faaf9cdff3 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/AbstractEntityRegionAccessStrategyTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/AbstractEntityRegionAccessStrategyTestCase.java @@ -29,6 +29,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import junit.framework.AssertionFailedError; +import org.hibernate.cache.infinispan.util.Caches; import org.infinispan.Cache; import org.infinispan.test.TestingUtil; import 
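Also above, `getInfinispanCache` now returns Infinispan's `AdvancedCache` directly instead of the removed `CacheAdapter` wrapper. For orientation, a short sketch of the advanced interface in use; the flag-scoped local clear mirrors what the removed adapter-based cleanup code did (`Flag` here is Infinispan's `org.infinispan.context.Flag`):

    // Obtain the AdvancedCache behind the default entity cache and clear it node-locally
    AdvancedCache cache = regionFactory.getCacheManager()
          .getCache(InfinispanRegionFactory.DEF_ENTITY_RESOURCE)
          .getAdvancedCache();
    cache.withFlags(Flag.CACHE_MODE_LOCAL).clear();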
org.infinispan.transaction.tm.BatchModeTransactionManager; @@ -39,7 +40,6 @@ import org.junit.Test; import org.hibernate.cache.infinispan.InfinispanRegionFactory; import org.hibernate.cache.infinispan.entity.EntityRegionImpl; -import org.hibernate.cache.infinispan.util.CacheHelper; import org.hibernate.cache.internal.CacheDataDescriptionImpl; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.access.AccessType; @@ -61,526 +61,510 @@ import static org.junit.Assert.assertTrue; * @since 3.5 */ public abstract class AbstractEntityRegionAccessStrategyTestCase extends AbstractNonFunctionalTestCase { - private static final Logger log = Logger.getLogger( AbstractEntityRegionAccessStrategyTestCase.class ); - public static final String REGION_NAME = "test/com.foo.test"; - public static final String KEY_BASE = "KEY"; - public static final String VALUE1 = "VALUE1"; - public static final String VALUE2 = "VALUE2"; + private static final Logger log = Logger.getLogger(AbstractEntityRegionAccessStrategyTestCase.class); - protected static int testCount; + public static final String REGION_NAME = "test/com.foo.test"; + public static final String KEY_BASE = "KEY"; + public static final String VALUE1 = "VALUE1"; + public static final String VALUE2 = "VALUE2"; - protected NodeEnvironment localEnvironment; - protected EntityRegionImpl localEntityRegion; - protected EntityRegionAccessStrategy localAccessStrategy; + protected static int testCount; - protected NodeEnvironment remoteEnvironment; - protected EntityRegionImpl remoteEntityRegion; - protected EntityRegionAccessStrategy remoteAccessStrategy; + protected NodeEnvironment localEnvironment; + protected EntityRegionImpl localEntityRegion; + protected EntityRegionAccessStrategy localAccessStrategy; - protected boolean invalidation; - protected boolean synchronous; + protected NodeEnvironment remoteEnvironment; + protected EntityRegionImpl remoteEntityRegion; + protected EntityRegionAccessStrategy remoteAccessStrategy; - protected Exception node1Exception; - protected Exception node2Exception; + protected boolean invalidation; + protected boolean synchronous; - protected AssertionFailedError node1Failure; - protected AssertionFailedError node2Failure; + protected Exception node1Exception; + protected Exception node2Exception; - @Before - public void prepareResources() throws Exception { - // to mimic exactly the old code results, both environments here are exactly the same... - Configuration cfg = createConfiguration( getConfigurationName() ); - localEnvironment = new NodeEnvironment( cfg ); - localEnvironment.prepare(); + protected AssertionFailedError node1Failure; + protected AssertionFailedError node2Failure; - localEntityRegion = localEnvironment.getEntityRegion( REGION_NAME, getCacheDataDescription() ); - localAccessStrategy = localEntityRegion.buildAccessStrategy( getAccessType() ); + @Before + public void prepareResources() throws Exception { + // to mimic exactly the old code results, both environments here are exactly the same... 
+ Configuration cfg = createConfiguration(getConfigurationName()); + localEnvironment = new NodeEnvironment(cfg); + localEnvironment.prepare(); - invalidation = localEntityRegion.getCacheAdapter().isClusteredInvalidation(); - synchronous = localEntityRegion.getCacheAdapter().isSynchronous(); + localEntityRegion = localEnvironment.getEntityRegion(REGION_NAME, getCacheDataDescription()); + localAccessStrategy = localEntityRegion.buildAccessStrategy(getAccessType()); - // Sleep a bit to avoid concurrent FLUSH problem - avoidConcurrentFlush(); + invalidation = Caches.isInvalidationCache(localEntityRegion.getCache()); + synchronous = Caches.isSynchronousCache(localEntityRegion.getCache()); - remoteEnvironment = new NodeEnvironment( cfg ); - remoteEnvironment.prepare(); + // Sleep a bit to avoid concurrent FLUSH problem + avoidConcurrentFlush(); - remoteEntityRegion = remoteEnvironment.getEntityRegion( REGION_NAME, getCacheDataDescription() ); - remoteAccessStrategy = remoteEntityRegion.buildAccessStrategy( getAccessType() ); + remoteEnvironment = new NodeEnvironment(cfg); + remoteEnvironment.prepare(); - waitForClusterToForm(localEntityRegion.getCacheAdapter().getCache(), - remoteEntityRegion.getCacheAdapter().getCache()); - } + remoteEntityRegion = remoteEnvironment.getEntityRegion(REGION_NAME, getCacheDataDescription()); + remoteAccessStrategy = remoteEntityRegion.buildAccessStrategy(getAccessType()); + + waitForClusterToForm(localEntityRegion.getCache(), + remoteEntityRegion.getCache()); + } protected void waitForClusterToForm(Cache... caches) { TestingUtil.blockUntilViewsReceived(10000, Arrays.asList(caches)); } - protected abstract String getConfigurationName(); + protected abstract String getConfigurationName(); - protected static Configuration createConfiguration(String configName) { - Configuration cfg = CacheTestUtil.buildConfiguration( - REGION_PREFIX, - InfinispanRegionFactory.class, - true, - false - ); - cfg.setProperty( InfinispanRegionFactory.ENTITY_CACHE_RESOURCE_PROP, configName ); - return cfg; - } + protected static Configuration createConfiguration(String configName) { + Configuration cfg = CacheTestUtil.buildConfiguration( + REGION_PREFIX, + InfinispanRegionFactory.class, + true, + false + ); + cfg.setProperty(InfinispanRegionFactory.ENTITY_CACHE_RESOURCE_PROP, configName); + return cfg; + } - protected CacheDataDescription getCacheDataDescription() { - return new CacheDataDescriptionImpl( true, true, ComparableComparator.INSTANCE ); - } + protected CacheDataDescription getCacheDataDescription() { + return new CacheDataDescriptionImpl(true, true, ComparableComparator.INSTANCE); + } - @After - public void releaseResources() throws Exception { - if ( localEnvironment != null ) { - localEnvironment.release(); - } - if ( remoteEnvironment != null ) { - remoteEnvironment.release(); - } - } + @After + public void releaseResources() throws Exception { + try { + if (localEnvironment != null) { + localEnvironment.release(); + } + } finally { + if (remoteEnvironment != null) { + remoteEnvironment.release(); + } + } + } - protected abstract AccessType getAccessType(); + protected abstract AccessType getAccessType(); - protected boolean isUsingInvalidation() { - return invalidation; - } + protected boolean isUsingInvalidation() { + return invalidation; + } - protected boolean isSynchronous() { - return synchronous; - } + protected boolean isSynchronous() { + return synchronous; + } - protected void assertThreadsRanCleanly() { - if ( node1Failure != null ) { - throw node1Failure; - } - 
if ( node2Failure != null ) { - throw node2Failure; - } + protected void assertThreadsRanCleanly() { + if (node1Failure != null) { + throw node1Failure; + } + if (node2Failure != null) { + throw node2Failure; + } - if ( node1Exception != null ) { - log.error("node1 saw an exception", node1Exception); - assertEquals( "node1 saw no exceptions", null, node1Exception ); - } + if (node1Exception != null) { + log.error("node1 saw an exception", node1Exception); + assertEquals("node1 saw no exceptions", null, node1Exception); + } - if ( node2Exception != null ) { - log.error("node2 saw an exception", node2Exception); - assertEquals( "node2 saw no exceptions", null, node2Exception ); - } - } + if (node2Exception != null) { + log.error("node2 saw an exception", node2Exception); + assertEquals("node2 saw no exceptions", null, node2Exception); + } + } - @Test - public abstract void testCacheConfiguration(); + @Test + public abstract void testCacheConfiguration(); - @Test - public void testGetRegion() { - assertEquals( "Correct region", localEntityRegion, localAccessStrategy.getRegion() ); - } + @Test + public void testGetRegion() { + assertEquals("Correct region", localEntityRegion, localAccessStrategy.getRegion()); + } - @Test - public void testPutFromLoad() throws Exception { - putFromLoadTest( false ); - } + @Test + public void testPutFromLoad() throws Exception { + putFromLoadTest(false); + } - @Test - public void testPutFromLoadMinimal() throws Exception { - putFromLoadTest( true ); - } + @Test + public void testPutFromLoadMinimal() throws Exception { + putFromLoadTest(true); + } - /** - * Simulate 2 nodes, both start, tx do a get, experience a cache miss, then 'read from db.' First - * does a putFromLoad, then an update. Second tries to do a putFromLoad with stale data (i.e. it - * took longer to read from the db). Both commit their tx. Then both start a new tx and get. - * First should see the updated data; second should either see the updated data (isInvalidation() - * == false) or null (isInvalidation() == true). - * - * @param useMinimalAPI - * @throws Exception - */ - private void putFromLoadTest(final boolean useMinimalAPI) throws Exception { + /** + * Simulate 2 nodes, both start, tx do a get, experience a cache miss, then + * 'read from db.' First does a putFromLoad, then an update. Second tries to + * do a putFromLoad with stale data (i.e. it took longer to read from the db). + * Both commit their tx. Then both start a new tx and get. First should see + * the updated data; second should either see the updated data + * (isInvalidation() == false) or null (isInvalidation() == true). 
+ * + * @param useMinimalAPI + * @throws Exception + */ + private void putFromLoadTest(final boolean useMinimalAPI) throws Exception { - final String KEY = KEY_BASE + testCount++; + final String KEY = KEY_BASE + testCount++; - final CountDownLatch writeLatch1 = new CountDownLatch( 1 ); - final CountDownLatch writeLatch2 = new CountDownLatch( 1 ); - final CountDownLatch completionLatch = new CountDownLatch( 2 ); + final CountDownLatch writeLatch1 = new CountDownLatch(1); + final CountDownLatch writeLatch2 = new CountDownLatch(1); + final CountDownLatch completionLatch = new CountDownLatch(2); - Thread node1 = new Thread() { + Thread node1 = new Thread() { - @Override - public void run() { + @Override + public void run() { - try { - long txTimestamp = System.currentTimeMillis(); - BatchModeTransactionManager.getInstance().begin(); + try { + long txTimestamp = System.currentTimeMillis(); + BatchModeTransactionManager.getInstance().begin(); - assertNull( "node1 starts clean", localAccessStrategy.get( KEY, txTimestamp ) ); + assertNull("node1 starts clean", localAccessStrategy.get(KEY, txTimestamp)); - writeLatch1.await(); + writeLatch1.await(); - if ( useMinimalAPI ) { - localAccessStrategy.putFromLoad( KEY, VALUE1, txTimestamp, new Integer( 1 ), true ); - } - else { - localAccessStrategy.putFromLoad( KEY, VALUE1, txTimestamp, new Integer( 1 ) ); - } + if (useMinimalAPI) { + localAccessStrategy.putFromLoad(KEY, VALUE1, txTimestamp, new Integer(1), true); + } else { + localAccessStrategy.putFromLoad(KEY, VALUE1, txTimestamp, new Integer(1)); + } - localAccessStrategy.update( KEY, VALUE2, new Integer( 2 ), new Integer( 1 ) ); + localAccessStrategy.update(KEY, VALUE2, new Integer(2), new Integer(1)); - BatchModeTransactionManager.getInstance().commit(); - } - catch (Exception e) { - log.error("node1 caught exception", e); - node1Exception = e; - rollback(); - } - catch (AssertionFailedError e) { - node1Failure = e; - rollback(); - } - finally { - // Let node2 write - writeLatch2.countDown(); - completionLatch.countDown(); - } - } - }; + BatchModeTransactionManager.getInstance().commit(); + } catch (Exception e) { + log.error("node1 caught exception", e); + node1Exception = e; + rollback(); + } catch (AssertionFailedError e) { + node1Failure = e; + rollback(); + } finally { + // Let node2 write + writeLatch2.countDown(); + completionLatch.countDown(); + } + } + }; - Thread node2 = new Thread() { + Thread node2 = new Thread() { - @Override - public void run() { + @Override + public void run() { - try { - long txTimestamp = System.currentTimeMillis(); - BatchModeTransactionManager.getInstance().begin(); + try { + long txTimestamp = System.currentTimeMillis(); + BatchModeTransactionManager.getInstance().begin(); - assertNull( "node1 starts clean", remoteAccessStrategy.get( KEY, txTimestamp ) ); + assertNull("node1 starts clean", remoteAccessStrategy.get(KEY, txTimestamp)); - // Let node1 write - writeLatch1.countDown(); - // Wait for node1 to finish - writeLatch2.await(); + // Let node1 write + writeLatch1.countDown(); + // Wait for node1 to finish + writeLatch2.await(); - if ( useMinimalAPI ) { - remoteAccessStrategy.putFromLoad( KEY, VALUE1, txTimestamp, new Integer( 1 ), true ); - } - else { - remoteAccessStrategy.putFromLoad( KEY, VALUE1, txTimestamp, new Integer( 1 ) ); - } + if (useMinimalAPI) { + remoteAccessStrategy.putFromLoad(KEY, VALUE1, txTimestamp, new Integer(1), true); + } else { + remoteAccessStrategy.putFromLoad(KEY, VALUE1, txTimestamp, new Integer(1)); + } - 
BatchModeTransactionManager.getInstance().commit(); - } - catch (Exception e) { - log.error("node2 caught exception", e); - node2Exception = e; - rollback(); - } - catch (AssertionFailedError e) { - node2Failure = e; - rollback(); - } - finally { - completionLatch.countDown(); - } - } - }; + BatchModeTransactionManager.getInstance().commit(); + } catch (Exception e) { + log.error("node2 caught exception", e); + node2Exception = e; + rollback(); + } catch (AssertionFailedError e) { + node2Failure = e; + rollback(); + } finally { + completionLatch.countDown(); + } + } + }; - node1.setDaemon( true ); - node2.setDaemon( true ); + node1.setDaemon(true); + node2.setDaemon(true); - node1.start(); - node2.start(); + node1.start(); + node2.start(); - assertTrue( "Threads completed", completionLatch.await( 2, TimeUnit.SECONDS ) ); + assertTrue("Threads completed", completionLatch.await(2, TimeUnit.SECONDS)); - assertThreadsRanCleanly(); + assertThreadsRanCleanly(); - long txTimestamp = System.currentTimeMillis(); - assertEquals( "Correct node1 value", VALUE2, localAccessStrategy.get( KEY, txTimestamp ) ); + long txTimestamp = System.currentTimeMillis(); + assertEquals("Correct node1 value", VALUE2, localAccessStrategy.get(KEY, txTimestamp)); - if ( isUsingInvalidation() ) { - // no data version to prevent the PFER; we count on db locks preventing this - assertEquals( "Expected node2 value", VALUE1, remoteAccessStrategy.get( KEY, txTimestamp ) ); - } - else { - // The node1 update is replicated, preventing the node2 PFER - assertEquals( "Correct node2 value", VALUE2, remoteAccessStrategy.get( KEY, txTimestamp ) ); - } - } + if (isUsingInvalidation()) { + // no data version to prevent the PFER; we count on db locks preventing this + assertEquals("Expected node2 value", VALUE1, remoteAccessStrategy.get(KEY, txTimestamp)); + } else { + // The node1 update is replicated, preventing the node2 PFER + assertEquals("Correct node2 value", VALUE2, remoteAccessStrategy.get(KEY, txTimestamp)); + } + } - @Test - public void testInsert() throws Exception { + @Test + public void testInsert() throws Exception { - final String KEY = KEY_BASE + testCount++; + final String KEY = KEY_BASE + testCount++; - final CountDownLatch readLatch = new CountDownLatch( 1 ); - final CountDownLatch commitLatch = new CountDownLatch( 1 ); - final CountDownLatch completionLatch = new CountDownLatch( 2 ); + final CountDownLatch readLatch = new CountDownLatch(1); + final CountDownLatch commitLatch = new CountDownLatch(1); + final CountDownLatch completionLatch = new CountDownLatch(2); - Thread inserter = new Thread() { + Thread inserter = new Thread() { - @Override - public void run() { + @Override + public void run() { - try { - long txTimestamp = System.currentTimeMillis(); - BatchModeTransactionManager.getInstance().begin(); + try { + long txTimestamp = System.currentTimeMillis(); + BatchModeTransactionManager.getInstance().begin(); - assertNull( "Correct initial value", localAccessStrategy.get( KEY, txTimestamp ) ); + assertNull("Correct initial value", localAccessStrategy.get(KEY, txTimestamp)); - localAccessStrategy.insert( KEY, VALUE1, new Integer( 1 ) ); + localAccessStrategy.insert(KEY, VALUE1, new Integer(1)); - readLatch.countDown(); - commitLatch.await(); + readLatch.countDown(); + commitLatch.await(); - BatchModeTransactionManager.getInstance().commit(); - } - catch (Exception e) { - log.error("node1 caught exception", e); - node1Exception = e; - rollback(); - } - catch (AssertionFailedError e) { - node1Failure = e; - 
rollback(); - } - finally { - completionLatch.countDown(); - } - } - }; + BatchModeTransactionManager.getInstance().commit(); + } catch (Exception e) { + log.error("node1 caught exception", e); + node1Exception = e; + rollback(); + } catch (AssertionFailedError e) { + node1Failure = e; + rollback(); + } finally { + completionLatch.countDown(); + } + } + }; - Thread reader = new Thread() { + Thread reader = new Thread() { - @Override - public void run() { + @Override + public void run() { - try { - long txTimestamp = System.currentTimeMillis(); - BatchModeTransactionManager.getInstance().begin(); + try { + long txTimestamp = System.currentTimeMillis(); + BatchModeTransactionManager.getInstance().begin(); - readLatch.await(); + readLatch.await(); // Object expected = !isBlockingReads() ? null : VALUE1; - Object expected = null; + Object expected = null; - assertEquals( - "Correct initial value", expected, localAccessStrategy.get( - KEY, - txTimestamp - ) - ); + assertEquals( + "Correct initial value", expected, localAccessStrategy.get( + KEY, + txTimestamp + ) + ); - BatchModeTransactionManager.getInstance().commit(); - } - catch (Exception e) { - log.error("node1 caught exception", e); - node1Exception = e; - rollback(); - } - catch (AssertionFailedError e) { - node1Failure = e; - rollback(); - } - finally { - commitLatch.countDown(); - completionLatch.countDown(); - } - } - }; + BatchModeTransactionManager.getInstance().commit(); + } catch (Exception e) { + log.error("node1 caught exception", e); + node1Exception = e; + rollback(); + } catch (AssertionFailedError e) { + node1Failure = e; + rollback(); + } finally { + commitLatch.countDown(); + completionLatch.countDown(); + } + } + }; - inserter.setDaemon( true ); - reader.setDaemon( true ); - inserter.start(); - reader.start(); + inserter.setDaemon(true); + reader.setDaemon(true); + inserter.start(); + reader.start(); - assertTrue( "Threads completed", completionLatch.await( 1, TimeUnit.SECONDS ) ); + assertTrue("Threads completed", completionLatch.await(1, TimeUnit.SECONDS)); - assertThreadsRanCleanly(); + assertThreadsRanCleanly(); - long txTimestamp = System.currentTimeMillis(); - assertEquals( "Correct node1 value", VALUE1, localAccessStrategy.get( KEY, txTimestamp ) ); - Object expected = isUsingInvalidation() ? null : VALUE1; - assertEquals( "Correct node2 value", expected, remoteAccessStrategy.get( KEY, txTimestamp ) ); - } + long txTimestamp = System.currentTimeMillis(); + assertEquals("Correct node1 value", VALUE1, localAccessStrategy.get(KEY, txTimestamp)); + Object expected = isUsingInvalidation() ? 
null : VALUE1; + assertEquals("Correct node2 value", expected, remoteAccessStrategy.get(KEY, txTimestamp)); + } - @Test - public void testUpdate() throws Exception { + @Test + public void testUpdate() throws Exception { - final String KEY = KEY_BASE + testCount++; + final String KEY = KEY_BASE + testCount++; - // Set up initial state - localAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); - remoteAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); + // Set up initial state + localAccessStrategy.putFromLoad(KEY, VALUE1, System.currentTimeMillis(), new Integer(1)); + remoteAccessStrategy.putFromLoad(KEY, VALUE1, System.currentTimeMillis(), new Integer(1)); - // Let the async put propagate - sleep( 250 ); + // Let the async put propagate + sleep(250); - final CountDownLatch readLatch = new CountDownLatch( 1 ); - final CountDownLatch commitLatch = new CountDownLatch( 1 ); - final CountDownLatch completionLatch = new CountDownLatch( 2 ); + final CountDownLatch readLatch = new CountDownLatch(1); + final CountDownLatch commitLatch = new CountDownLatch(1); + final CountDownLatch completionLatch = new CountDownLatch(2); - Thread updater = new Thread( "testUpdate-updater" ) { + Thread updater = new Thread("testUpdate-updater") { - @Override - public void run() { - boolean readerUnlocked = false; - try { - long txTimestamp = System.currentTimeMillis(); - BatchModeTransactionManager.getInstance().begin(); - log.debug("Transaction began, get initial value"); - assertEquals( "Correct initial value", VALUE1, localAccessStrategy.get( KEY, txTimestamp ) ); - log.debug("Now update value"); - localAccessStrategy.update( KEY, VALUE2, new Integer( 2 ), new Integer( 1 ) ); - log.debug("Notify the read latch"); - readLatch.countDown(); - readerUnlocked = true; - log.debug("Await commit"); - commitLatch.await(); - BatchModeTransactionManager.getInstance().commit(); - } - catch (Exception e) { - log.error("node1 caught exception", e); - node1Exception = e; - rollback(); - } - catch (AssertionFailedError e) { - node1Failure = e; - rollback(); - } - finally { - if ( !readerUnlocked ) { - readLatch.countDown(); - } - log.debug("Completion latch countdown"); - completionLatch.countDown(); - } - } - }; + @Override + public void run() { + boolean readerUnlocked = false; + try { + long txTimestamp = System.currentTimeMillis(); + BatchModeTransactionManager.getInstance().begin(); + log.debug("Transaction began, get initial value"); + assertEquals("Correct initial value", VALUE1, localAccessStrategy.get(KEY, txTimestamp)); + log.debug("Now update value"); + localAccessStrategy.update(KEY, VALUE2, new Integer(2), new Integer(1)); + log.debug("Notify the read latch"); + readLatch.countDown(); + readerUnlocked = true; + log.debug("Await commit"); + commitLatch.await(); + BatchModeTransactionManager.getInstance().commit(); + } catch (Exception e) { + log.error("node1 caught exception", e); + node1Exception = e; + rollback(); + } catch (AssertionFailedError e) { + node1Failure = e; + rollback(); + } finally { + if (!readerUnlocked) { + readLatch.countDown(); + } + log.debug("Completion latch countdown"); + completionLatch.countDown(); + } + } + }; - Thread reader = new Thread( "testUpdate-reader" ) { + Thread reader = new Thread("testUpdate-reader") { - @Override - public void run() { - try { - long txTimestamp = System.currentTimeMillis(); - BatchModeTransactionManager.getInstance().begin(); - log.debug("Transaction began, await read latch"); - 
readLatch.await(); - log.debug("Read latch acquired, verify local access strategy"); + @Override + public void run() { + try { + long txTimestamp = System.currentTimeMillis(); + BatchModeTransactionManager.getInstance().begin(); + log.debug("Transaction began, await read latch"); + readLatch.await(); + log.debug("Read latch acquired, verify local access strategy"); - // This won't block w/ mvc and will read the old value - Object expected = VALUE1; - assertEquals( "Correct value", expected, localAccessStrategy.get( KEY, txTimestamp ) ); + // This won't block w/ mvc and will read the old value + Object expected = VALUE1; + assertEquals("Correct value", expected, localAccessStrategy.get(KEY, txTimestamp)); - BatchModeTransactionManager.getInstance().commit(); - } - catch (Exception e) { - log.error("node1 caught exception", e); - node1Exception = e; - rollback(); - } - catch (AssertionFailedError e) { - node1Failure = e; - rollback(); - } - finally { - commitLatch.countDown(); - log.debug("Completion latch countdown"); - completionLatch.countDown(); - } - } - }; + BatchModeTransactionManager.getInstance().commit(); + } catch (Exception e) { + log.error("node1 caught exception", e); + node1Exception = e; + rollback(); + } catch (AssertionFailedError e) { + node1Failure = e; + rollback(); + } finally { + commitLatch.countDown(); + log.debug("Completion latch countdown"); + completionLatch.countDown(); + } + } + }; - updater.setDaemon( true ); - reader.setDaemon( true ); - updater.start(); - reader.start(); + updater.setDaemon(true); + reader.setDaemon(true); + updater.start(); + reader.start(); - // Should complete promptly - assertTrue( completionLatch.await( 2, TimeUnit.SECONDS ) ); + // Should complete promptly + assertTrue(completionLatch.await(2, TimeUnit.SECONDS)); - assertThreadsRanCleanly(); + assertThreadsRanCleanly(); - long txTimestamp = System.currentTimeMillis(); - assertEquals( "Correct node1 value", VALUE2, localAccessStrategy.get( KEY, txTimestamp ) ); - Object expected = isUsingInvalidation() ? null : VALUE2; - assertEquals( "Correct node2 value", expected, remoteAccessStrategy.get( KEY, txTimestamp ) ); - } + long txTimestamp = System.currentTimeMillis(); + assertEquals("Correct node1 value", VALUE2, localAccessStrategy.get(KEY, txTimestamp)); + Object expected = isUsingInvalidation() ? 
null : VALUE2; + assertEquals("Correct node2 value", expected, remoteAccessStrategy.get(KEY, txTimestamp)); + } - @Test - public void testRemove() throws Exception { - evictOrRemoveTest( false ); - } + @Test + public void testRemove() throws Exception { + evictOrRemoveTest(false); + } - @Test - public void testRemoveAll() throws Exception { - evictOrRemoveAllTest( false ); - } + @Test + public void testRemoveAll() throws Exception { + evictOrRemoveAllTest(false); + } - @Test - public void testEvict() throws Exception { - evictOrRemoveTest( true ); - } + @Test + public void testEvict() throws Exception { + evictOrRemoveTest(true); + } - @Test - public void testEvictAll() throws Exception { - evictOrRemoveAllTest( true ); - } + @Test + public void testEvictAll() throws Exception { + evictOrRemoveAllTest(true); + } - private void evictOrRemoveTest(final boolean evict) throws Exception { - final String KEY = KEY_BASE + testCount++; - assertEquals( 0, getValidKeyCount( localEntityRegion.getCacheAdapter().keySet() ) ); - assertEquals( 0, getValidKeyCount( remoteEntityRegion.getCacheAdapter().keySet() ) ); + private void evictOrRemoveTest(final boolean evict) throws Exception { + final String KEY = KEY_BASE + testCount++; + assertEquals(0, getValidKeyCount(localEntityRegion.getCache().keySet())); + assertEquals(0, getValidKeyCount(remoteEntityRegion.getCache().keySet())); - assertNull( "local is clean", localAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertNull( "remote is clean", remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); + assertNull("local is clean", localAccessStrategy.get(KEY, System.currentTimeMillis())); + assertNull("remote is clean", remoteAccessStrategy.get(KEY, System.currentTimeMillis())); - localAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); - assertEquals( VALUE1, localAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - remoteAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); - assertEquals( VALUE1, remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); + localAccessStrategy.putFromLoad(KEY, VALUE1, System.currentTimeMillis(), new Integer(1)); + assertEquals(VALUE1, localAccessStrategy.get(KEY, System.currentTimeMillis())); + remoteAccessStrategy.putFromLoad(KEY, VALUE1, System.currentTimeMillis(), new Integer(1)); + assertEquals(VALUE1, remoteAccessStrategy.get(KEY, System.currentTimeMillis())); - CacheHelper.withinTx(localEntityRegion.getTransactionManager(), new Callable() { + Caches.withinTx(localEntityRegion.getTransactionManager(), new Callable() { @Override public Void call() throws Exception { - if ( evict ) - localAccessStrategy.evict( KEY ); + if (evict) + localAccessStrategy.evict(KEY); else - localAccessStrategy.remove( KEY ); + localAccessStrategy.remove(KEY); return null; } }); - assertEquals(null, localAccessStrategy.get(KEY, System.currentTimeMillis())); - assertEquals( 0, getValidKeyCount( localEntityRegion.getCacheAdapter().keySet() ) ); - assertEquals( null, remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertEquals( 0, getValidKeyCount( remoteEntityRegion.getCacheAdapter().keySet() ) ); - } + assertEquals(null, localAccessStrategy.get(KEY, System.currentTimeMillis())); + assertEquals(0, getValidKeyCount(localEntityRegion.getCache().keySet())); + assertEquals(null, remoteAccessStrategy.get(KEY, System.currentTimeMillis())); + assertEquals(0, getValidKeyCount(remoteEntityRegion.getCache().keySet())); + } - private 
void evictOrRemoveAllTest(final boolean evict) throws Exception { - final String KEY = KEY_BASE + testCount++; - assertEquals( 0, getValidKeyCount( localEntityRegion.getCacheAdapter().keySet() ) ); - assertEquals( 0, getValidKeyCount( remoteEntityRegion.getCacheAdapter().keySet() ) ); - assertNull( "local is clean", localAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertNull( "remote is clean", remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); + private void evictOrRemoveAllTest(final boolean evict) throws Exception { + final String KEY = KEY_BASE + testCount++; + assertEquals(0, getValidKeyCount(localEntityRegion.getCache().keySet())); + assertEquals(0, getValidKeyCount(remoteEntityRegion.getCache().keySet())); + assertNull("local is clean", localAccessStrategy.get(KEY, System.currentTimeMillis())); + assertNull("remote is clean", remoteAccessStrategy.get(KEY, System.currentTimeMillis())); - localAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); - assertEquals( VALUE1, localAccessStrategy.get( KEY, System.currentTimeMillis() ) ); + localAccessStrategy.putFromLoad(KEY, VALUE1, System.currentTimeMillis(), new Integer(1)); + assertEquals(VALUE1, localAccessStrategy.get(KEY, System.currentTimeMillis())); - // Wait for async propagation - sleep( 250 ); + // Wait for async propagation + sleep(250); - remoteAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); - assertEquals( VALUE1, remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); + remoteAccessStrategy.putFromLoad(KEY, VALUE1, System.currentTimeMillis(), new Integer(1)); + assertEquals(VALUE1, remoteAccessStrategy.get(KEY, System.currentTimeMillis())); - // Wait for async propagation - sleep( 250 ); + // Wait for async propagation + sleep(250); - CacheHelper.withinTx(localEntityRegion.getTransactionManager(), new Callable() { + Caches.withinTx(localEntityRegion.getTransactionManager(), new Callable() { @Override public Void call() throws Exception { if (evict) { @@ -593,41 +577,40 @@ public abstract class AbstractEntityRegionAccessStrategyTestCase extends Abstrac } }); - // This should re-establish the region root node in the optimistic case - assertNull(localAccessStrategy.get(KEY, System.currentTimeMillis())); - assertEquals( 0, getValidKeyCount( localEntityRegion.getCacheAdapter().keySet() ) ); + // This should re-establish the region root node in the optimistic case + assertNull(localAccessStrategy.get(KEY, System.currentTimeMillis())); + assertEquals(0, getValidKeyCount(localEntityRegion.getCache().keySet())); - // Re-establishing the region root on the local node doesn't - // propagate it to other nodes. Do a get on the remote node to re-establish - assertEquals( null, remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertEquals( 0, getValidKeyCount( remoteEntityRegion.getCacheAdapter().keySet() ) ); + // Re-establishing the region root on the local node doesn't + // propagate it to other nodes. 
Do a get on the remote node to re-establish + assertEquals(null, remoteAccessStrategy.get(KEY, System.currentTimeMillis())); + assertEquals(0, getValidKeyCount(remoteEntityRegion.getCache().keySet())); - // Test whether the get above messes up the optimistic version - remoteAccessStrategy.putFromLoad( KEY, VALUE1, System.currentTimeMillis(), new Integer( 1 ) ); - assertEquals( VALUE1, remoteAccessStrategy.get( KEY, System.currentTimeMillis() ) ); - assertEquals( 1, getValidKeyCount( remoteEntityRegion.getCacheAdapter().keySet() ) ); + // Test whether the get above messes up the optimistic version + remoteAccessStrategy.putFromLoad(KEY, VALUE1, System.currentTimeMillis(), new Integer(1)); + assertEquals(VALUE1, remoteAccessStrategy.get(KEY, System.currentTimeMillis())); + assertEquals(1, getValidKeyCount(remoteEntityRegion.getCache().keySet())); - // Wait for async propagation - sleep( 250 ); + // Wait for async propagation + sleep(250); - assertEquals( - "local is correct", (isUsingInvalidation() ? null : VALUE1), localAccessStrategy - .get( KEY, System.currentTimeMillis() ) - ); - assertEquals( - "remote is correct", VALUE1, remoteAccessStrategy.get( - KEY, System - .currentTimeMillis() - ) - ); - } + assertEquals( + "local is correct", (isUsingInvalidation() ? null : VALUE1), localAccessStrategy + .get(KEY, System.currentTimeMillis()) + ); + assertEquals( + "remote is correct", VALUE1, remoteAccessStrategy.get( + KEY, System + .currentTimeMillis() + ) + ); + } - protected void rollback() { - try { - BatchModeTransactionManager.getInstance().rollback(); - } - catch (Exception e) { - log.error(e.getMessage(), e); - } - } + protected void rollback() { + try { + BatchModeTransactionManager.getInstance().rollback(); + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } } diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/EntityRegionImplTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/EntityRegionImplTestCase.java index 5fed2f1ec3..1e64663b32 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/EntityRegionImplTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/entity/EntityRegionImplTestCase.java @@ -27,14 +27,13 @@ import java.util.Properties; import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.CacheAdapterImpl; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.EntityRegion; import org.hibernate.cache.spi.Region; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.test.cache.infinispan.AbstractEntityCollectionRegionTestCase; +import org.infinispan.AdvancedCache; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; @@ -81,8 +80,9 @@ public class EntityRegionImplTestCase extends AbstractEntityCollectionRegionTest } @Override - protected CacheAdapter getInfinispanCache(InfinispanRegionFactory regionFactory) { - return CacheAdapterImpl.newInstance(regionFactory.getCacheManager().getCache(InfinispanRegionFactory.DEF_ENTITY_RESOURCE).getAdvancedCache()); + protected AdvancedCache getInfinispanCache(InfinispanRegionFactory regionFactory) { + return regionFactory.getCacheManager().getCache( + 
InfinispanRegionFactory.DEF_ENTITY_RESOURCE).getAdvancedCache(); } } diff --git a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheHelper.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/Age.java similarity index 53% rename from hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheHelper.java rename to hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/Age.java index ff1f6bd80e..218c711d4e 100644 --- a/hibernate-infinispan/src/main/java/org/hibernate/cache/infinispan/util/CacheHelper.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/Age.java @@ -1,10 +1,10 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * - * Copyright (c) 2007, Red Hat, Inc. and/or it's affiliates or third-party contributors as + * Copyright (c) 2012, Red Hat, Inc or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors.  All third-party contributions are - * distributed under license by Red Hat, Inc. and/or it's affiliates. + * distributed under license by Red Hat Middleware LLC. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU @@ -21,37 +21,41 @@ * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ -package org.hibernate.cache.infinispan.util; -import java.util.concurrent.Callable; -import javax.transaction.Status; -import javax.transaction.TransactionManager; +package org.hibernate.test.cache.infinispan.functional; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; /** - * Helper for dealing with Infinisan cache instances. - * * @author Galder Zamarreño - * @since 3.5 */ -public class CacheHelper { +@NamedQueries({@NamedQuery(name=Age.QUERY, query = "SELECT a FROM Age a")}) +@Entity +public class Age { - /** - * Disallow external instantiation of CacheHelper. 
- */ - private CacheHelper() { + public static final String QUERY = "Age.findAll"; + + @Id + @GeneratedValue + private Integer id; + private Integer age; + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public Integer getAge() { + return age; } - public static T withinTx(TransactionManager tm, Callable c) throws Exception { - tm.begin(); - try { - return c.call(); - } catch (Exception e) { - tm.setRollbackOnly(); - throw e; - } finally { - if (tm.getStatus() == Status.STATUS_ACTIVE) tm.commit(); - else tm.rollback(); - } + public void setAge(Integer age) { + this.age = age; } - } diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/ConcurrentWriteTest.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/ConcurrentWriteTest.java index 7ef3ca0ebc..9b236fc368 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/ConcurrentWriteTest.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/ConcurrentWriteTest.java @@ -44,8 +44,6 @@ import org.junit.Test; import org.hibernate.FlushMode; import org.hibernate.Session; -import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cfg.Configuration; import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; @@ -104,11 +102,6 @@ public class ConcurrentWriteTest extends SingleNodeTestCase { return DualNodeJtaTransactionManagerImpl.getInstance( DualNodeTestCase.LOCAL ); } - @Override - protected Class getCacheRegionFactory() { - return InfinispanRegionFactory.class; - } - @Override protected Class getConnectionProviderClass() { return DualNodeConnectionProviderImpl.class; diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/SingleNodeTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/SingleNodeTestCase.java index 590a7d8fad..9a4c945ebd 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/SingleNodeTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/SingleNodeTestCase.java @@ -26,6 +26,9 @@ package org.hibernate.test.cache.infinispan.functional; import javax.transaction.Status; import javax.transaction.TransactionManager; +import org.infinispan.configuration.parsing.ConfigurationBuilderHolder; +import org.infinispan.manager.EmbeddedCacheManager; +import org.infinispan.test.fwk.TestCacheManagerFactory; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; import org.junit.Before; @@ -86,7 +89,7 @@ public abstract class SingleNodeTestCase extends BaseCoreFunctionalTestCase { } protected Class getCacheRegionFactory() { - return InfinispanRegionFactory.class; + return TestInfinispanRegionFactory.class; } protected Class getTransactionFactoryClass() { @@ -148,4 +151,17 @@ public abstract class SingleNodeTestCase extends BaseCoreFunctionalTestCase { } } -} \ No newline at end of file + public static class TestInfinispanRegionFactory extends InfinispanRegionFactory { + + public TestInfinispanRegionFactory() { + super(); // For reflection-based instantiation + } + + @Override + protected EmbeddedCacheManager createCacheManager(ConfigurationBuilderHolder holder) { + return 
TestCacheManagerFactory.createClusteredCacheManager(holder); + } + + } + +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/bulk/BulkOperationsTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/bulk/BulkOperationsTestCase.java index ffc9da4ac4..1d4534932c 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/bulk/BulkOperationsTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/bulk/BulkOperationsTestCase.java @@ -27,11 +27,11 @@ import java.util.Set; import javax.transaction.Status; import javax.transaction.TransactionManager; +import org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase; import org.junit.Test; import org.hibernate.FlushMode; import org.hibernate.Session; -import org.hibernate.cache.infinispan.InfinispanRegionFactory; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.Configuration; @@ -73,7 +73,7 @@ public class BulkOperationsTestCase extends BaseCoreFunctionalTestCase { } protected Class getCacheRegionFactory() { - return InfinispanRegionFactory.class; + return SingleNodeTestCase.TestInfinispanRegionFactory.class; } protected Class getTransactionFactoryClass() { diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/classloader/IsolatedClassLoaderTest.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/classloader/IsolatedClassLoaderTest.java index bc626ad2f9..108a50faab 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/classloader/IsolatedClassLoaderTest.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/classloader/IsolatedClassLoaderTest.java @@ -69,7 +69,7 @@ public class IsolatedClassLoaderTest extends DualNodeTestCase { private static ClassLoader originalTCCL; - private static ClassLoader visibleClassesCl; +// private static ClassLoader visibleClassesCl; @BeforeClass public static void prepareClassLoader() { @@ -200,12 +200,10 @@ public class IsolatedClassLoaderTest extends DualNodeTestCase { if ( useNamedRegion ) { cacheName = "AccountRegion"; // As defined by ClassLoaderTestDAO via calls to query.setCacheRegion // Define cache configurations for region early to avoid ending up with local caches for this region - localManager.defineConfiguration( - cacheName, "replicated-query", new org.infinispan.config.Configuration() - ); - remoteManager.defineConfiguration( - cacheName, "replicated-query", new org.infinispan.config.Configuration() - ); + localManager.defineConfiguration(cacheName, + localManager.getCacheConfiguration("replicated-query")); + remoteManager.defineConfiguration(cacheName, + remoteManager.getCacheConfiguration("replicated-query")); } else { cacheName = "replicated-query"; diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/cluster/ClusterAwareRegionFactory.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/cluster/ClusterAwareRegionFactory.java index 8156017da8..51dc1ed822 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/cluster/ClusterAwareRegionFactory.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/functional/cluster/ClusterAwareRegionFactory.java @@ -24,6 +24,7 @@ 
package org.hibernate.test.cache.infinispan.functional.cluster; import java.util.Hashtable; import java.util.Properties; +import org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase; import org.infinispan.manager.EmbeddedCacheManager; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; @@ -55,7 +56,8 @@ public class ClusterAwareRegionFactory extends AbstractRegionFactory { private static final Log log = LogFactory.getLog(ClusterAwareRegionFactory.class); private static final Hashtable cacheManagers = new Hashtable(); - private final InfinispanRegionFactory delegate = new InfinispanRegionFactory(); + private final InfinispanRegionFactory delegate = + new SingleNodeTestCase.TestInfinispanRegionFactory(); private String cacheManagerName; private boolean locallyAdded; diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/query/QueryRegionImplTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/query/QueryRegionImplTestCase.java index c9b1373270..3ec976b6b1 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/query/QueryRegionImplTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/query/QueryRegionImplTestCase.java @@ -29,6 +29,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import junit.framework.AssertionFailedError; +import org.hibernate.cache.infinispan.util.Caches; +import org.infinispan.AdvancedCache; import org.infinispan.notifications.Listener; import org.infinispan.notifications.cachelistener.annotation.CacheEntryVisited; import org.infinispan.notifications.cachelistener.event.CacheEntryVisitedEvent; @@ -38,9 +40,6 @@ import org.jboss.logging.Logger; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.CacheAdapterImpl; -import org.hibernate.cache.infinispan.util.CacheHelper; import org.hibernate.cache.internal.StandardQueryCache; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.spi.GeneralDataRegion; @@ -80,7 +79,7 @@ public class QueryRegionImplTestCase extends AbstractGeneralDataRegionTestCase { @Override protected void regionPut(final GeneralDataRegion region) throws Exception { - CacheHelper.withinTx(BatchModeTransactionManager.getInstance(), new Callable() { + Caches.withinTx(BatchModeTransactionManager.getInstance(), new Callable() { @Override public Void call() throws Exception { region.put(KEY, VALUE1); @@ -91,7 +90,7 @@ public class QueryRegionImplTestCase extends AbstractGeneralDataRegionTestCase { @Override protected void regionEvict(final GeneralDataRegion region) throws Exception { - CacheHelper.withinTx(BatchModeTransactionManager.getInstance(), new Callable() { + Caches.withinTx(BatchModeTransactionManager.getInstance(), new Callable() { @Override public Void call() throws Exception { region.evict(KEY); @@ -101,8 +100,8 @@ public class QueryRegionImplTestCase extends AbstractGeneralDataRegionTestCase { } @Override - protected CacheAdapter getInfinispanCache(InfinispanRegionFactory regionFactory) { - return CacheAdapterImpl.newInstance(regionFactory.getCacheManager().getCache( "local-query" ).getAdvancedCache()); + protected AdvancedCache getInfinispanCache(InfinispanRegionFactory regionFactory) { + return regionFactory.getCacheManager().getCache( 
"local-query" ).getAdvancedCache(); } @Override @@ -230,7 +229,7 @@ public class QueryRegionImplTestCase extends AbstractGeneralDataRegionTestCase { assertEquals( VALUE1, region.get( KEY ) ); // final Fqn rootFqn = getRegionFqn(getStandardRegionName(REGION_PREFIX), REGION_PREFIX); - final CacheAdapter jbc = getInfinispanCache( regionFactory ); + final AdvancedCache jbc = getInfinispanCache(regionFactory); final CountDownLatch blockerLatch = new CountDownLatch( 1 ); final CountDownLatch writerLatch = new CountDownLatch( 1 ); diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/PutFromLoadStressTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/PutFromLoadStressTestCase.java new file mode 100644 index 0000000000..50b1c6be41 --- /dev/null +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/PutFromLoadStressTestCase.java @@ -0,0 +1,343 @@ +package org.hibernate.test.cache.infinispan.stress; + +import org.hibernate.Query; +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl; +import org.hibernate.cfg.Configuration; +import org.hibernate.cfg.Environment; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.test.cache.infinispan.functional.Age; +import org.hibernate.testing.ServiceRegistryBuilder; +import org.infinispan.util.logging.Log; +import org.infinispan.util.logging.LogFactory; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +import javax.transaction.TransactionManager; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.infinispan.test.TestingUtil.withTx; +import static org.junit.Assert.*; + +/** + * A stress test for putFromLoad operations + * + * @author Galder Zamarreño + * @since 4.1 + */ +@Ignore +public class PutFromLoadStressTestCase { + + static final Log log = LogFactory.getLog(PutFromLoadStressTestCase.class); + static final boolean isTrace = log.isTraceEnabled(); + static final int NUM_THREADS = 100; + static final int WARMUP_TIME_SECS = 10; + static final long RUNNING_TIME_SECS = Integer.getInteger("time", 60); + static final long LAUNCH_INTERVAL_MILLIS = 10; + + static final int NUM_INSTANCES = 5000; + + static SessionFactory sessionFactory; + static TransactionManager tm; + + final AtomicBoolean run = new AtomicBoolean(true); + + @BeforeClass + public static void beforeClass() { + Configuration cfg = new Configuration(); + cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true"); + cfg.setProperty(Environment.USE_QUERY_CACHE, "true"); + // TODO: Tweak to have a fully local region factory (no transport, cache mode = local, no marshalling, ...etc) + cfg.setProperty(Environment.CACHE_REGION_FACTORY, + "org.hibernate.cache.infinispan.InfinispanRegionFactory"); + cfg.setProperty(Environment.JTA_PLATFORM, + "org.hibernate.service.jta.platform.internal.JBossStandAloneJtaPlatform"); + + // Force minimal puts off to simplify stressing putFromLoad logic + cfg.setProperty(Environment.USE_MINIMAL_PUTS, "false"); + + // 
Mappings + configureMappings(cfg); + +// // Database settings +// cfg.setProperty(Environment.DRIVER, "org.postgresql.Driver"); +// cfg.setProperty(Environment.URL, "jdbc:postgresql://localhost/hibernate"); +// cfg.setProperty(Environment.DIALECT, "org.hibernate.dialect.PostgreSQL82Dialect"); +// cfg.setProperty(Environment.USER, "hbadmin"); +// cfg.setProperty(Environment.PASS, "hbadmin"); + + // Create database schema in each run + cfg.setProperty(Environment.HBM2DDL_AUTO, "create-drop"); + + StandardServiceRegistryImpl registry = + ServiceRegistryBuilder.buildServiceRegistry(cfg.getProperties()); + sessionFactory = cfg.buildSessionFactory(registry); + + tm = com.arjuna.ats.jta.TransactionManager.transactionManager(); + } + + private static void configureMappings(Configuration cfg) { + String[] mappings = { + "cache/infinispan/functional/Item.hbm.xml", + "cache/infinispan/functional/Customer.hbm.xml", + "cache/infinispan/functional/Contact.hbm.xml"}; + for (String mapping : mappings) + cfg.addResource("org/hibernate/test/" + mapping); + + Class[] annotatedClasses = getAnnotatedClasses(); + if ( annotatedClasses != null ) { + for ( Class annotatedClass : annotatedClasses ) { + cfg.addAnnotatedClass( annotatedClass ); + } + } + + cfg.buildMappings(); + Iterator it = cfg.getClassMappings(); + String cacheStrategy = "transactional"; + while (it.hasNext()) { + PersistentClass clazz = (PersistentClass) it.next(); + if (!clazz.isInherited()) { + cfg.setCacheConcurrencyStrategy(clazz.getEntityName(), cacheStrategy); + } + } + it = cfg.getCollectionMappings(); + while (it.hasNext()) { + Collection coll = (Collection) it.next(); + cfg.setCollectionCacheConcurrencyStrategy( coll.getRole(), cacheStrategy); + } + } + + @AfterClass + public static void afterClass() { + sessionFactory.close(); + } + + public static Class[] getAnnotatedClasses() { + return new Class[] {Age.class}; + } + + @Test + public void testQueryPerformance() throws Exception { + store(); +// doTest(true); +// run.set(true); // Reset run + doTest(false); + } + + private void store() throws Exception { + for (int i = 0; i < NUM_INSTANCES; i++) { + final Age age = new Age(); + age.setAge(i); + withTx(tm, new Callable() { + @Override + public Void call() throws Exception { + Session s = sessionFactory.openSession(); + s.getTransaction().begin(); + s.persist(age); + s.getTransaction().commit(); + s.close(); + return null; + } + }); + } + } + + private void doTest(boolean warmup) throws Exception { + ExecutorService executor = Executors.newFixedThreadPool(NUM_THREADS); + try { + CyclicBarrier barrier = new CyclicBarrier(NUM_THREADS + 1); + List> futures = new ArrayList>(NUM_THREADS); + for (int i = 0; i < NUM_THREADS; i++) { + Future future = executor.submit( + new SelectQueryRunner(barrier, warmup, i + 1)); + futures.add(future); + Thread.sleep(LAUNCH_INTERVAL_MILLIS); + } + barrier.await(); // wait for all threads to be ready + + long timeout = warmup ? 
WARMUP_TIME_SECS : RUNNING_TIME_SECS; + TimeUnit unit = TimeUnit.SECONDS; + + Thread.sleep(unit.toMillis(timeout)); // Wait for the duration of the test + run.set(false); // Instruct tests to stop doing work + barrier.await(2, TimeUnit.MINUTES); // wait for all threads to finish + + log.infof("[%s] All threads finished, check for exceptions", title(warmup)); + for (Future future : futures) { + String opsPerMS = future.get(); + if (!warmup) + log.infof("[%s] Operations/ms: %s", title(warmup), opsPerMS); + } + log.infof("[%s] All future gets checked", title(warmup)); + } catch (Exception e) { + log.errorf(e, "Error in one of the execution threads during %s", title(warmup)); + throw e; + } finally { + executor.shutdownNow(); + } + } + + private String title(boolean warmup) { + return warmup ? "warmup" : "stress"; + } + + public class SelectQueryRunner implements Callable { + + final CyclicBarrier barrier; + final boolean warmup; + final Integer customerId; + + public SelectQueryRunner(CyclicBarrier barrier, boolean warmup, Integer customerId) { + this.barrier = barrier; + this.warmup = warmup; + this.customerId = customerId; + } + + @Override + public String call() throws Exception { + try { + if (isTrace) + log.tracef("[%s] Wait for all executions paths to be ready to perform calls", title(warmup)); + barrier.await(); + + long start = System.nanoTime(); + int runs = 0; + if (isTrace) + log.tracef("[%s] Start time: %d", title(warmup), start); + +// while (USE_TIME && PutFromLoadStressTestCase.this.run.get()) { +// if (runs % 100000 == 0) +// log.infof("[%s] Query run # %d", title(warmup), runs); +// +//// Customer customer = query(); +//// deleteCached(customer); + + queryItems(); +// deleteCachedItems(); +// +// runs++; +// } + long end = System.nanoTime(); + long duration = end - start; + if (isTrace) + log.tracef("[%s] End time: %d, duration: %d, runs: %d", + title(warmup), start, duration, runs); + + return opsPerMS(duration, runs); + } finally { + if (isTrace) + log.tracef("[%s] Wait for all execution paths to finish", title(warmup)); + + barrier.await(); + } + } + + private void deleteCachedItems() throws Exception { + withTx(tm, new Callable() { + @Override + public Void call() throws Exception { + sessionFactory.getCache().evictEntityRegion(Age.class); + return null; + } + }); + } + + private void queryItems() throws Exception { + withTx(tm, new Callable() { + @Override + public Void call() throws Exception { + Session s = sessionFactory.getCurrentSession(); + Query query = s.getNamedQuery(Age.QUERY).setCacheable(true); +// Query query = s.createQuery("from Age").setCacheable(true); + List result = (List) query.list(); + assertFalse(result.isEmpty()); + return null; + } + }); + } + + +// private void deleteCachedItems() throws Exception { +// withTx(tm, new Callable() { +// @Override +// public Void call() throws Exception { +// sessionFactory.getCache().evictEntityRegion(Item.class); +// return null; +// } +// }); +// } +// +// private void queryItems() throws Exception { +// withTx(tm, new Callable() { +// @Override +// public Void call() throws Exception { +// Session s = sessionFactory.getCurrentSession(); +// Query query = s.createQuery("from Item").setCacheable(true); +// List result = (List) query.list(); +// assertFalse(result.isEmpty()); +// return null; +// } +// }); +// } + +// private Customer query() throws Exception { +// return withTx(tm, new Callable() { +// @Override +// public Customer call() throws Exception { +// Session s = sessionFactory.getCurrentSession(); +// 
Customer customer = (Customer) s.load(Customer.class, customerId); +// assertNotNull(customer); +// Set contacts = customer.getContacts(); +// Contact contact = contacts.iterator().next(); +// assertNotNull(contact); +// assertEquals("private contact", contact.getName()); +// +//// Contact found = contacts.isEmpty() ? null : contacts.iterator().next(); +//// Set contacts = found.getContacts(); +//// assertTrue(contacts + " not empty", contacts.isEmpty()); +//// +//// if (found != null && found.hashCode() == System.nanoTime()) { +//// System.out.print(" "); +//// } else if (found == null) { +//// throw new IllegalStateException("Contact cannot be null"); +//// } +// return customer; +// } +// }); +// } + +// private void deleteCached(final Customer customer) throws Exception { +// withTx(tm, new Callable() { +// @Override +// public Void call() throws Exception { +// sessionFactory.getCache().evictEntity(Customer.class, customer.getId()); +// return null; // TODO: Customise this generated block +// } +// }); +// } + + private String opsPerMS(long nanos, int ops) { + long totalMillis = TimeUnit.NANOSECONDS.toMillis(nanos); + if (totalMillis > 0) + return ops / totalMillis + " ops/ms"; + else + return "NAN ops/ms"; + } + + } + + +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/SecondLevelCacheStressTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/SecondLevelCacheStressTestCase.java new file mode 100644 index 0000000000..caca545e0b --- /dev/null +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/SecondLevelCacheStressTestCase.java @@ -0,0 +1,626 @@ +/* + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other + * contributors as indicated by the @author tags. All rights reserved. + * See the copyright.txt in the distribution for a full listing of + * individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
+ */ + +package org.hibernate.test.cache.infinispan.stress; + +import org.hibernate.Query; +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl; +import org.hibernate.cache.infinispan.InfinispanRegionFactory; +import org.hibernate.cfg.Configuration; +import org.hibernate.cfg.Environment; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.test.cache.infinispan.stress.entities.Address; +import org.hibernate.test.cache.infinispan.stress.entities.Family; +import org.hibernate.test.cache.infinispan.stress.entities.Person; +import org.hibernate.testing.ServiceRegistryBuilder; +import org.infinispan.util.concurrent.ConcurrentHashSet; +import org.junit.After; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; + +import javax.transaction.TransactionManager; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Random; +import java.util.concurrent.*; + +import static org.infinispan.test.TestingUtil.withTx; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** + * Stress test for second level cache. + * + * TODO Various: + * - Switch to a JDBC connection pool to avoid too many connections created + * (as well as consuming memory, it's expensive to create) + * - Use barrier associated execution tasks at the beginning and end to track + * down start/end times for runs. + * + * @author Galder Zamarreño + * @since 4.1 + */ +@Ignore +public class SecondLevelCacheStressTestCase { + + static final int NUM_THREADS = 10; + static final long WARMUP_TIME = TimeUnit.SECONDS.toNanos(Integer.getInteger("warmup-time", 1) * 5); + static final long RUNNING_TIME = TimeUnit.SECONDS.toNanos(Integer.getInteger("time", 1) * 60); + static final boolean PROFILE = Boolean.getBoolean("profile"); + static final boolean ALLOCATION = Boolean.getBoolean("allocation"); + static final int RUN_COUNT_LIMIT = Integer.getInteger("count", 1000); // max number of runs per operation + static final Random RANDOM = new Random(12345); + + String provider; + ConcurrentHashSet updatedIds; + Queue removeIds; + SessionFactory sessionFactory; + TransactionManager tm; + volatile int numEntities; + + @Before + public void beforeClass() { + provider = getProvider(); + + updatedIds = new ConcurrentHashSet(); + removeIds = new ConcurrentLinkedQueue(); + + Configuration cfg = new Configuration(); + cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true"); + cfg.setProperty(Environment.USE_QUERY_CACHE, "true"); + configureCache(cfg); + + // Mappings + configureMappings(cfg); + + // Database settings + cfg.setProperty(Environment.DRIVER, "com.mysql.jdbc.Driver"); + cfg.setProperty(Environment.URL, "jdbc:mysql://localhost:3306/hibernate"); + cfg.setProperty(Environment.DIALECT, "org.hibernate.dialect.MySQL5InnoDBDialect"); + cfg.setProperty(Environment.USER, "root"); + cfg.setProperty(Environment.PASS, "password"); + + // Create database schema in each run + cfg.setProperty(Environment.HBM2DDL_AUTO, "create-drop"); + + StandardServiceRegistryImpl registry = + ServiceRegistryBuilder.buildServiceRegistry(cfg.getProperties()); + sessionFactory = cfg.buildSessionFactory(registry); + + tm = com.arjuna.ats.jta.TransactionManager.transactionManager(); + } + + protected String 
getProvider() { + return "infinispan"; + } + + protected void configureCache(Configuration cfg) { + cfg.setProperty(Environment.CACHE_REGION_FACTORY, + "org.hibernate.cache.infinispan.InfinispanRegionFactory"); + cfg.setProperty(Environment.JTA_PLATFORM, + "org.hibernate.service.jta.platform.internal.JBossStandAloneJtaPlatform"); + cfg.setProperty(InfinispanRegionFactory.INFINISPAN_CONFIG_RESOURCE_PROP, + "stress-local-infinispan.xml"); + } + + @After + public void afterClass() { + sessionFactory.close(); + } + + @Test + public void testEntityLifecycle() throws InterruptedException { + if (!PROFILE) { + System.out.printf("[provider=%s] Warming up\n", provider); + doEntityLifecycle(true); + + // Recreate session factory cleaning everything + afterClass(); + beforeClass(); + } + + System.out.printf("[provider=%s] Testing...\n", provider); + doEntityLifecycle(false); + } + + void doEntityLifecycle(boolean isWarmup) { + long runningTimeout = isWarmup ? WARMUP_TIME : RUNNING_TIME; + TotalStats insertPerf = runEntityInsert(runningTimeout); + numEntities = countEntities().intValue(); + printResult(isWarmup, "[provider=%s] Inserts/s %10.2f (%d entities)\n", + provider, insertPerf.getOpsPerSec("INSERT"), numEntities); + + TotalStats updatePerf = runEntityUpdate(runningTimeout); + List updateIdsSeq = new ArrayList(updatedIds); + printResult(isWarmup, "[provider=%s] Updates/s %10.2f (%d updates)\n", + provider, updatePerf.getOpsPerSec("UPDATE"), updateIdsSeq.size()); + + TotalStats findUpdatedPerf = + runEntityFindUpdated(runningTimeout, updateIdsSeq); + printResult(isWarmup, "[provider=%s] Updated finds/s %10.2f\n", + provider, findUpdatedPerf.getOpsPerSec("FIND_UPDATED")); + + TotalStats findQueryPerf = runEntityFindQuery(runningTimeout, isWarmup); + printResult(isWarmup, "[provider=%s] Query finds/s %10.2f\n", + provider, findQueryPerf.getOpsPerSec("FIND_QUERY")); + + TotalStats findRandomPerf = runEntityFindRandom(runningTimeout); + printResult(isWarmup, "[provider=%s] Random finds/s %10.2f\n", + provider, findRandomPerf.getOpsPerSec("FIND_RANDOM")); + + // Get all entity ids + List entityIds = new ArrayList(); + for (int i = 1; i <= numEntities; i++) entityIds.add(i); + + // Shuffle them + Collections.shuffle(entityIds); + + // Add them to the queue delete consumption + removeIds.addAll(entityIds); + + TotalStats deletePerf = runEntityDelete(runningTimeout); + printResult(isWarmup, "[provider=%s] Deletes/s %10.2f\n", + provider, deletePerf.getOpsPerSec("DELETE")); + + // TODO Print 2LC statistics... + } + + static void printResult(boolean isWarmup, String format, Object... 
args) { + if (!isWarmup) System.out.printf(format, args); + } + + Long countEntities() { + try { + return withTx(tm, new Callable() { + @Override + public Long call() throws Exception { + Session s = sessionFactory.openSession(); + Query query = s.createQuery("select count(*) from Family"); + Object result = query.list().get(0); + s.close(); + return (Long) result; + } + }); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + + TotalStats runEntityInsert(long runningTimeout) { + return runSingleWork(runningTimeout, "insert", insertOperation()); + } + + TotalStats runEntityUpdate(long runningTimeout) { + return runSingleWork(runningTimeout, "update", updateOperation()); + } + + TotalStats runEntityFindUpdated(long runningTimeout, + List updatedIdsSeq) { + return runSingleWork(runningTimeout, "find-updated", findUpdatedOperation(updatedIdsSeq)); + } + + TotalStats runEntityFindQuery(long runningTimeout, boolean warmup) { + return runSingleWork(runningTimeout, "find-query", findQueryOperation(warmup)); + } + + TotalStats runEntityFindRandom(long runningTimeout) { + return runSingleWork(runningTimeout, "find-random", findRandomOperation()); + } + + TotalStats runEntityDelete(long runningTimeout) { + return runSingleWork(runningTimeout, "remove", deleteOperation()); + } + + TotalStats runSingleWork(long runningTimeout, final String name, Operation op) { + final TotalStats perf = new TotalStats(); + + ExecutorService exec = Executors.newFixedThreadPool( + NUM_THREADS, new ThreadFactory() { + volatile int i = 0; + @Override + public Thread newThread(Runnable r) { + return new Thread(new ThreadGroup(name), + r, "worker-" + name + "-" + i++); + } + }); + + try { + List> futures = new ArrayList>(NUM_THREADS); + CyclicBarrier barrier = new CyclicBarrier(NUM_THREADS + 1); + + for (int i = 0; i < NUM_THREADS; i++) + futures.add(exec.submit( + new WorkerThread(runningTimeout, perf, op, barrier))); + + try { + barrier.await(); // wait for all threads to be ready + barrier.await(); // wait for all threads to finish + + // Now check whether anything went wrong... 
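As an aside (not part of the patch): the barrier pattern runSingleWork relies on reduces to a small standalone sketch — the driver holds the extra CyclicBarrier party, the first await() releases all workers at once, the second await() marks the end of the measured window, and Future.get() afterwards surfaces any worker exception. The names below (BarrierDriverDemo, doSomeWork) are illustrative assumptions, not code from the patch.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

public class BarrierDriverDemo {
    public static void main(String[] args) throws Exception {
        final int workers = 4;
        ExecutorService exec = Executors.newFixedThreadPool(workers);
        CyclicBarrier barrier = new CyclicBarrier(workers + 1); // +1 party for the driver
        List<Future<Integer>> futures = new ArrayList<>();

        for (int i = 0; i < workers; i++) {
            final int id = i;
            futures.add(exec.submit(() -> {
                barrier.await();          // start together
                int ops = doSomeWork(id); // the measured section
                barrier.await();          // report back together
                return ops;
            }));
        }

        barrier.await(); // release the workers
        barrier.await(); // wait until they have all finished
        for (Future<Integer> f : futures) {
            System.out.println("ops: " + f.get()); // rethrows any worker exception
        }
        exec.shutdown();
    }

    static int doSomeWork(int id) {
        return id * 10; // stand-in for the real operation loop
    }
}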
+ for (Future future : futures) future.get(); + } catch (Exception e) { + throw new RuntimeException(e); + } + + return perf; + } finally { + exec.shutdown(); + } + } + + T captureThrowables(Callable task) throws Exception { + try { + return task.call(); + } catch (Throwable t) { + t.printStackTrace(); + if (t instanceof Exception) + throw (Exception) t; + else + throw new RuntimeException(t); + } + } + + Operation insertOperation() { + return new Operation("INSERT") { + @Override + boolean call(final int run) throws Exception { + return captureThrowables(new Callable() { + @Override + public Boolean call() throws Exception { + return withTx(tm, new Callable() { + @Override + public Boolean call() throws Exception { + Session s = sessionFactory.openSession(); + s.getTransaction().begin(); + + String name = "Zamarreño-" + run; + Family family = new Family(name); + s.persist(family); + + s.getTransaction().commit(); + s.close(); + return true; + } + }); + } + }); + } + }; + } + + Operation updateOperation() { + return new Operation("UPDATE") { + @Override + boolean call(final int run) throws Exception { + return captureThrowables(new Callable() { + @Override + public Boolean call() throws Exception { + return withTx(tm, new Callable() { + @Override + public Boolean call() throws Exception { + Session s = sessionFactory.openSession(); + s.getTransaction().begin(); + + // Update random entity that has been inserted + int id = RANDOM.nextInt(numEntities) + 1; + Family family = (Family) s.load(Family.class, id); + String newSecondName = "Arrizabalaga-" + run; + family.setSecondName(newSecondName); + + s.getTransaction().commit(); + s.close(); + // Cache updated entities for later read + updatedIds.add(id); + return true; + } + }); + } + }); + } + }; + } + + Operation findUpdatedOperation(final List updatedIdsSeq) { + return new Operation("FIND_UPDATED") { + @Override + boolean call(final int run) throws Exception { + return captureThrowables(new Callable() { + @Override + public Boolean call() throws Exception { + Session s = sessionFactory.openSession(); + + int id = updatedIdsSeq.get(RANDOM.nextInt( + updatedIdsSeq.size())); + Family family = (Family) s.load(Family.class, id); + String secondName = family.getSecondName(); + assertNotNull(secondName); + assertTrue("Second name not expected: " + secondName, + secondName.startsWith("Arrizabalaga")); + + s.close(); + return true; + } + }); + } + }; + } + + private Operation findQueryOperation(final boolean isWarmup) { + return new Operation("FIND_QUERY") { + @Override + boolean call(final int run) throws Exception { + return captureThrowables(new Callable() { + @Override + public Boolean call() throws Exception { + Session s = sessionFactory.openSession(); + + Query query = s.createQuery("from Family") + .setCacheable(true); + int maxResults = isWarmup ? 10 : 100; + query.setMaxResults(maxResults); + List result = (List) query.list(); + assertEquals(maxResults, result.size()); + + s.close(); + return true; + } + }); + } + }; + } + + private Operation findRandomOperation() { + return new Operation("FIND_RANDOM") { + @Override + boolean call(final int run) throws Exception { + return captureThrowables(new Callable() { + @Override + public Boolean call() throws Exception { + Session s = sessionFactory.openSession(); + + int id = RANDOM.nextInt(numEntities) + 1; + Family family = (Family) s.load(Family.class, id); + String familyName = family.getName(); + // Skip ñ check in order to avoid issues... 
+ assertTrue("Unexpected family: " + familyName , + familyName.startsWith("Zamarre")); + + s.close(); + return true; + } + }); + } + }; + } + + private Operation deleteOperation() { + return new Operation("DELETE") { + @Override + boolean call(final int run) throws Exception { + return captureThrowables(new Callable() { + @Override + public Boolean call() throws Exception { + return withTx(tm, new Callable() { + @Override + public Boolean call() throws Exception { + Session s = sessionFactory.openSession(); + s.getTransaction().begin(); + + // Get each id and remove it + int id = removeIds.poll(); + Family family = (Family) s.load(Family.class, id); + String familyName = family.getName(); + // Skip ñ check in order to avoid issues... + assertTrue("Unexpected family: " + familyName , + familyName.startsWith("Zamarre")); + s.delete(family); + + s.getTransaction().commit(); + s.close(); + + return true; + } + }); + } + }); + } + }; + } + + public static Class[] getAnnotatedClasses() { + return new Class[] {Family.class, Person.class, Address.class}; + } + + private static void configureMappings(Configuration cfg) { + Class[] annotatedClasses = getAnnotatedClasses(); + if ( annotatedClasses != null ) { + for ( Class annotatedClass : annotatedClasses ) { + cfg.addAnnotatedClass( annotatedClass ); + } + } + + cfg.buildMappings(); + Iterator it = cfg.getClassMappings(); + String cacheStrategy = "transactional"; + while (it.hasNext()) { + PersistentClass clazz = (PersistentClass) it.next(); + if (!clazz.isInherited()) { + cfg.setCacheConcurrencyStrategy(clazz.getEntityName(), cacheStrategy); + } + } + it = cfg.getCollectionMappings(); + while (it.hasNext()) { + Collection coll = (Collection) it.next(); + cfg.setCollectionCacheConcurrencyStrategy(coll.getRole(), cacheStrategy); + } + } + + + private static abstract class Operation { + final String name; + + Operation(String name) { + this.name = name; + } + + abstract boolean call(int run) throws Exception; + + } + + private class WorkerThread implements Callable { + private final long runningTimeout; + private final TotalStats perf; + private final Operation op; + private final CyclicBarrier barrier; + + public WorkerThread(long runningTimeout, TotalStats perf, + Operation op, CyclicBarrier barrier) { + this.runningTimeout = runningTimeout; + this.perf = perf; + this.op = op; + this.barrier = barrier; + } + + @Override + public Void call() throws Exception { + // TODO: Extend barrier to capture start time + barrier.await(); + try { + long startNanos = System.nanoTime(); + long endNanos = startNanos + runningTimeout; + int runs = 0; + long missCount = 0; + while (callOperation(endNanos, runs)) { + boolean hit = op.call(runs); + if (!hit) missCount++; + runs++; + } + + // TODO: Extend barrier to capture end time + perf.addStats(op.name, runs, + System.nanoTime() - startNanos, missCount); + } finally { + barrier.await(); + } + return null; + } + + private boolean callOperation(long endNanos, int runs) { + if (ALLOCATION) { + return runs < RUN_COUNT_LIMIT; + } else { + return (runs & 0x400) != 0 || System.nanoTime() < endNanos; + } + } + } + + private static class TotalStats { + private ConcurrentHashMap statsMap = + new ConcurrentHashMap(); + + public void addStats(String opName, long opCount, + long runningTime, long missCount) { + OpStats s = new OpStats(opName, opCount, runningTime, missCount); + OpStats old = statsMap.putIfAbsent(opName, s); + boolean replaced = old == null; + while (!replaced) { + old = statsMap.get(opName); + s = new 
OpStats(old, opCount, runningTime, missCount); + replaced = statsMap.replace(opName, old, s); + } + } + + public double getOpsPerSec(String opName) { + OpStats s = statsMap.get(opName); + if (s == null) return 0; + return s.opCount * 1000000000. / s.runningTime * s.threadCount; + } + + public double getTotalOpsPerSec() { + long totalOpCount = 0; + long totalRunningTime = 0; + long totalThreadCount = 0; + for (Map.Entry e : statsMap.entrySet()) { + OpStats s = e.getValue(); + totalOpCount += s.opCount; + totalRunningTime += s.runningTime; + totalThreadCount += s.threadCount; + } + return totalOpCount * 1000000000. / totalRunningTime * totalThreadCount; + } + + public double getHitRatio(String opName) { + OpStats s = statsMap.get(opName); + if (s == null) return 0; + return 1 - 1. * s.missCount / s.opCount; + } + + public double getTotalHitRatio() { + long totalOpCount = 0; + long totalMissCount = 0; + for (Map.Entry e : statsMap.entrySet()) { + OpStats s = e.getValue(); + totalOpCount += s.opCount; + totalMissCount += s.missCount; + } + return 1 - 1. * totalMissCount / totalOpCount; + } + } + + private static class OpStats { + public final String opName; + public final int threadCount; + public final long opCount; + public final long runningTime; + public final long missCount; + + private OpStats(String opName, long opCount, + long runningTime, long missCount) { + this.opName = opName; + this.threadCount = 1; + this.opCount = opCount; + this.runningTime = runningTime; + this.missCount = missCount; + } + + private OpStats(OpStats base, long opCount, + long runningTime, long missCount) { + this.opName = base.opName; + this.threadCount = base.threadCount + 1; + this.opCount = base.opCount + opCount; + this.runningTime = base.runningTime + runningTime; + this.missCount = base.missCount + missCount; + } + } + +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Address.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Address.java new file mode 100644 index 0000000000..004996544b --- /dev/null +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Address.java @@ -0,0 +1,216 @@ +/* + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other + * contributors as indicated by the @author tags. All rights reserved. + * See the copyright.txt in the distribution for a full listing of + * individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
+ */ + +package org.hibernate.test.cache.infinispan.stress.entities; + +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; + +@Entity +public final class Address { + + @Id + @GeneratedValue + private int id; + private int streetNumber; + private String streetName; + private String cityName; + private String countryName; + private String zipCode; + @OneToMany + private Set inhabitants; + private int version; + + public Address(int streetNumber, String streetName, String cityName, String countryName) { + this.streetNumber = streetNumber; + this.streetName = streetName; + this.cityName = cityName; + this.countryName = countryName; + this.zipCode = null; + this.inhabitants = new HashSet(); + this.id = 0; + this.version = 0; + } + + protected Address() { + this.streetNumber = 0; + this.streetName = null; + this.cityName = null; + this.countryName = null; + this.zipCode = null; + this.inhabitants = new HashSet(); + this.id = 0; + this.version = 0; + } + + public int getStreetNumber() { + return streetNumber; + } + + public String getStreetName() { + return streetName; + } + + public String getCityName() { + return cityName; + } + + public String getCountryName() { + return countryName; + } + + public String getZipCode() { + return zipCode; + } + + public void setZipCode(String zipCode) { + this.zipCode = zipCode; + } + + public Set getInhabitants() { + return inhabitants; + } + + public boolean addInhabitant(Person inhabitant) { + boolean done = false; + if (inhabitants.add(inhabitant)) { + inhabitant.setAddress(this); + done = true; + } + return done; + } + + public boolean remInhabitant(Person inhabitant) { + boolean done = false; + if (inhabitants.remove(inhabitant)) { + inhabitant.setAddress(null); + done = true; + } + return done; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getVersion() { + return version; + } + + protected void removeAllInhabitants() { + // inhabitants relation is not CASCADED, we must delete the relation on other side by ourselves + for (Iterator iterator = inhabitants.iterator(); iterator.hasNext(); ) { + Person p = iterator.next(); + p.setAddress(null); + } + } + + protected void setStreetNumber(int streetNumber) { + this.streetNumber = streetNumber; + } + + protected void setStreetName(String streetName) { + this.streetName = streetName; + } + + protected void setCityName(String cityName) { + this.cityName = cityName; + } + + protected void setCountryName(String countryName) { + this.countryName = countryName; + } + + protected void setInhabitants(Set inhabitants) { + if (inhabitants == null) { + this.inhabitants = new HashSet(); + } else { + this.inhabitants = inhabitants; + } + } + + protected void setVersion(Integer version) { + this.version = version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Address address = (Address) o; + + if (id != address.id) return false; + if (streetNumber != address.streetNumber) return false; + if (version != address.version) return false; + if (cityName != null ? !cityName.equals(address.cityName) : address.cityName != null) + return false; + if (countryName != null ? !countryName.equals(address.countryName) : address.countryName != null) + return false; + if (inhabitants != null ? 
!inhabitants.equals(address.inhabitants) : address.inhabitants != null) + return false; + if (streetName != null ? !streetName.equals(address.streetName) : address.streetName != null) + return false; + if (zipCode != null ? !zipCode.equals(address.zipCode) : address.zipCode != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = streetNumber; + result = 31 * result + (streetName != null ? streetName.hashCode() : 0); + result = 31 * result + (cityName != null ? cityName.hashCode() : 0); + result = 31 * result + (countryName != null ? countryName.hashCode() : 0); + result = 31 * result + (zipCode != null ? zipCode.hashCode() : 0); + result = 31 * result + (inhabitants != null ? inhabitants.hashCode() : 0); + result = 31 * result + id; + result = 31 * result + version; + return result; + } + + @Override + public String toString() { + return "Address{" + + "cityName='" + cityName + '\'' + + ", streetNumber=" + streetNumber + + ", streetName='" + streetName + '\'' + + ", countryName='" + countryName + '\'' + + ", zipCode='" + zipCode + '\'' + + ", inhabitants=" + inhabitants + + ", id=" + id + + ", version=" + version + + '}'; + } + +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Family.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Family.java new file mode 100644 index 0000000000..805f1dac34 --- /dev/null +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Family.java @@ -0,0 +1,149 @@ +/* + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other + * contributors as indicated by the @author tags. All rights reserved. + * See the copyright.txt in the distribution for a full listing of + * individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
+ */ + +package org.hibernate.test.cache.infinispan.stress.entities; + +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import java.util.HashSet; +import java.util.Set; + +@Entity +public final class Family { + + @Id + @GeneratedValue + private int id; + private String name; + private String secondName; + @OneToMany + private Set members; + private int version; + + public Family(String name) { + this.name = name; + this.secondName = null; + this.members = new HashSet(); + this.id = 0; + this.version = 0; + } + + protected Family() { + this.name = null; + this.secondName = null; + this.members = new HashSet(); + this.id = 0; + this.version = 0; + } + + public String getName() { + return name; + } + + public Set getMembers() { + return members; + } + + public String getSecondName() { + return secondName; + } + + public void setSecondName(String secondName) { + this.secondName = secondName; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getVersion() { + return version; + } + + public void setName(String name) { + this.name = name; + } + + public void setMembers(Set members) { + if (members == null) { + this.members = new HashSet(); + } else { + this.members = members; + } + } + + public void setVersion(Integer version) { + this.version = version; + } + + boolean addMember(Person member) { + return members.add(member); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Family family = (Family) o; + + if (id != family.id) return false; + if (version != family.version) return false; + if (members != null ? !members.equals(family.members) : family.members != null) + return false; + if (name != null ? !name.equals(family.name) : family.name != null) + return false; + if (secondName != null ? !secondName.equals(family.secondName) : family.secondName != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = name != null ? name.hashCode() : 0; + result = 31 * result + (secondName != null ? secondName.hashCode() : 0); + result = 31 * result + (members != null ? members.hashCode() : 0); + result = 31 * result + id; + result = 31 * result + version; + return result; + } + + @Override + public String toString() { + return "Family{" + + "id=" + id + + ", name='" + name + '\'' + + ", secondName='" + secondName + '\'' + + ", members=" + members + + ", version=" + version + + '}'; + } + +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Person.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Person.java new file mode 100644 index 0000000000..cf105ff723 --- /dev/null +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/stress/entities/Person.java @@ -0,0 +1,178 @@ +/* + * JBoss, Home of Professional Open Source + * Copyright 2012 Red Hat Inc. and/or its affiliates and other + * contributors as indicated by the @author tags. All rights reserved. + * See the copyright.txt in the distribution for a full listing of + * individual contributors. 
+ * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. + */ + +package org.hibernate.test.cache.infinispan.stress.entities; + +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.ManyToOne; +import java.util.Date; + +@Entity +public class Person { + + @Id + @GeneratedValue + private int id; + private String firstName; + @ManyToOne + private Family family; + private Date birthDate; + @ManyToOne + private Address address; + private boolean checked; + private int version; + + public Person(String firstName, Family family) { + this.firstName = firstName; + this.family = family; + this.birthDate = null; + this.address = null; + this.checked = false; + this.id = 0; + this.version = 0; + this.family.addMember(this); + } + + protected Person() { + this.firstName = null; + this.family = null; + this.birthDate = null; + this.address = null; + this.checked = false; + this.id = 0; + this.version = 0; + } + + public String getFirstName() { + return firstName; + } + + public Family getFamily() { + return family; + } + + public Date getBirthDate() { + return birthDate; + } + + public void setBirthDate(Date birthDate) { + this.birthDate = birthDate; + } + + public Address getAddress() { + return address; + } + + public void setAddress(Address address) { + // To skip Hibernate BUG with access.PROPERTY : the rest should be done in DAO + // this.address = address; + // Hibernate BUG : if we update the relation on 2 sides + if (this.address != address) { + if (this.address != null) this.address.remInhabitant(this); + this.address = address; + if (this.address != null) this.address.addInhabitant(this); + } + } + + public boolean isChecked() { + return checked; + } + + public void setChecked(boolean checked) { + this.checked = checked; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getVersion() { + return version; + } + + protected void setFirstName(String firstName) { + this.firstName = firstName; + } + + protected void setFamily(Family family) { + this.family = family; + } + + protected void setVersion(Integer version) { + this.version = version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Person person = (Person) o; + + if (checked != person.checked) return false; + if (id != person.id) return false; + if (version != person.version) return false; + if (address != null ? !address.equals(person.address) : person.address != null) + return false; + if (birthDate != null ? !birthDate.equals(person.birthDate) : person.birthDate != null) + return false; + if (family != null ? 
!family.equals(person.family) : person.family != null) + return false; + if (firstName != null ? !firstName.equals(person.firstName) : person.firstName != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = firstName != null ? firstName.hashCode() : 0; + result = 31 * result + (family != null ? family.hashCode() : 0); + result = 31 * result + (birthDate != null ? birthDate.hashCode() : 0); + result = 31 * result + (address != null ? address.hashCode() : 0); + result = 31 * result + (checked ? 1 : 0); + result = 31 * result + id; + result = 31 * result + version; + return result; + } + + @Override + public String toString() { + return "Person{" + + "address=" + address + + ", firstName='" + firstName + '\'' + + ", family=" + family + + ", birthDate=" + birthDate + + ", checked=" + checked + + ", id=" + id + + ", version=" + version + + '}'; + } + +} diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/timestamp/TimestampsRegionImplTestCase.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/timestamp/TimestampsRegionImplTestCase.java index 6143c0af6b..daf4047b7b 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/timestamp/TimestampsRegionImplTestCase.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/timestamp/TimestampsRegionImplTestCase.java @@ -25,7 +25,22 @@ package org.hibernate.test.cache.infinispan.timestamp; import java.util.Properties; +import org.hibernate.boot.registry.StandardServiceRegistryBuilder; +import org.hibernate.cache.infinispan.InfinispanRegionFactory; +import org.hibernate.cache.infinispan.impl.ClassLoaderAwareCache; +import org.hibernate.cache.infinispan.timestamp.TimestampsRegionImpl; +import org.hibernate.cache.spi.CacheDataDescription; +import org.hibernate.cache.spi.Region; +import org.hibernate.cache.spi.UpdateTimestampsCache; +import org.hibernate.cfg.Configuration; +import org.hibernate.test.cache.infinispan.AbstractGeneralDataRegionTestCase; +import org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase; +import org.hibernate.test.cache.infinispan.functional.classloader.Account; +import org.hibernate.test.cache.infinispan.functional.classloader.AccountHolder; +import org.hibernate.test.cache.infinispan.functional.classloader.SelectedClassnameClassLoader; +import org.hibernate.test.cache.infinispan.util.CacheTestUtil; import org.infinispan.AdvancedCache; +import org.infinispan.context.Flag; import org.infinispan.notifications.Listener; import org.infinispan.notifications.cachelistener.annotation.CacheEntryActivated; import org.infinispan.notifications.cachelistener.annotation.CacheEntryCreated; @@ -38,24 +53,6 @@ import org.infinispan.notifications.cachelistener.annotation.CacheEntryRemoved; import org.infinispan.notifications.cachelistener.annotation.CacheEntryVisited; import org.infinispan.notifications.cachelistener.event.Event; -import org.hibernate.boot.registry.StandardServiceRegistryBuilder; -import org.hibernate.cache.infinispan.InfinispanRegionFactory; -import org.hibernate.cache.infinispan.impl.ClassLoaderAwareCache; -import org.hibernate.cache.infinispan.timestamp.TimestampsRegionImpl; -import org.hibernate.cache.infinispan.util.CacheAdapter; -import org.hibernate.cache.infinispan.util.CacheAdapterImpl; -import org.hibernate.cache.infinispan.util.FlagAdapter; -import org.hibernate.cache.spi.CacheDataDescription; -import org.hibernate.cache.spi.Region; -import 
org.hibernate.cache.spi.UpdateTimestampsCache; -import org.hibernate.cfg.Configuration; - -import org.hibernate.test.cache.infinispan.AbstractGeneralDataRegionTestCase; -import org.hibernate.test.cache.infinispan.functional.classloader.Account; -import org.hibernate.test.cache.infinispan.functional.classloader.AccountHolder; -import org.hibernate.test.cache.infinispan.functional.classloader.SelectedClassnameClassLoader; -import org.hibernate.test.cache.infinispan.util.CacheTestUtil; - /** * Tests of TimestampsRegionImpl. * @@ -75,8 +72,8 @@ public class TimestampsRegionImplTestCase extends AbstractGeneralDataRegionTestC } @Override - protected CacheAdapter getInfinispanCache(InfinispanRegionFactory regionFactory) { - return CacheAdapterImpl.newInstance(regionFactory.getCacheManager().getCache("timestamps").getAdvancedCache()); + protected AdvancedCache getInfinispanCache(InfinispanRegionFactory regionFactory) { + return regionFactory.getCacheManager().getCache("timestamps").getAdvancedCache(); } public void testClearTimestampsRegionInIsolated() throws Exception { @@ -108,7 +105,7 @@ public class TimestampsRegionImplTestCase extends AbstractGeneralDataRegionTestC Account acct = new Account(); acct.setAccountHolder(new AccountHolder()); - region.getCacheAdapter().withFlags(FlagAdapter.FORCE_SYNCHRONOUS).put(acct, "boo"); + region.getCache().withFlags(Flag.FORCE_SYNCHRONOUS).put(acct, "boo"); // region.put(acct, "boo"); // @@ -126,7 +123,15 @@ public class TimestampsRegionImplTestCase extends AbstractGeneralDataRegionTestC return CacheTestUtil.buildConfiguration("test", MockInfinispanRegionFactory.class, false, true); } - public static class MockInfinispanRegionFactory extends InfinispanRegionFactory { + public static class MockInfinispanRegionFactory extends SingleNodeTestCase.TestInfinispanRegionFactory { + + public MockInfinispanRegionFactory() { + } + +// @Override +// protected TimestampsRegionImpl createTimestampsRegion(CacheAdapter cacheAdapter, String regionName) { +// return new MockTimestampsRegionImpl(cacheAdapter, regionName, getTransactionManager(), this); +// } @Override protected AdvancedCache createCacheWrapper(AdvancedCache cache) { @@ -138,20 +143,7 @@ public class TimestampsRegionImplTestCase extends AbstractGeneralDataRegionTestC }; } - // @Override -// protected EmbeddedCacheManager createCacheManager(Properties properties) throws CacheException { -// try { -// EmbeddedCacheManager manager = new DefaultCacheManager(InfinispanRegionFactory.DEF_INFINISPAN_CONFIG_RESOURCE); -// org.infinispan.config.Configuration ispnCfg = new org.infinispan.config.Configuration(); -// ispnCfg.setCacheMode(org.infinispan.config.Configuration.CacheMode.REPL_SYNC); -// manager.defineConfiguration("timestamps", ispnCfg); -// return manager; -// } catch (IOException e) { -// throw new CacheException("Unable to create default cache manager", e); -// } -// } - - @Listener + @Listener public static class MockClassLoaderAwareListener extends ClassLoaderAwareCache.ClassLoaderAwareListener { MockClassLoaderAwareListener(Object listener, ClassLoaderAwareCache cache) { super(listener, cache); diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/JBossStandaloneJtaExampleTest.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/JBossStandaloneJtaExampleTest.java index d0258ba3d4..7f40cbf711 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/JBossStandaloneJtaExampleTest.java +++ 
b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/JBossStandaloneJtaExampleTest.java @@ -23,8 +23,12 @@ */ package org.hibernate.test.cache.infinispan.tm; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + import java.util.Iterator; import java.util.Properties; + import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.Name; @@ -34,6 +38,20 @@ import javax.naming.StringRefAddr; import javax.transaction.Status; import javax.transaction.UserTransaction; +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.hibernate.cfg.AvailableSettings; +import org.hibernate.cfg.Configuration; +import org.hibernate.cfg.Environment; +import org.hibernate.engine.transaction.jta.platform.internal.JBossStandAloneJtaPlatform; +import org.hibernate.mapping.Collection; +import org.hibernate.mapping.PersistentClass; +import org.hibernate.service.ServiceRegistry; +import org.hibernate.stat.Statistics; +import org.hibernate.test.cache.infinispan.functional.Item; +import org.hibernate.testing.ServiceRegistryBuilder; +import org.hibernate.testing.jta.JtaAwareConnectionProviderImpl; +import org.infinispan.configuration.cache.ConfigurationBuilder; import org.infinispan.transaction.lookup.JBossStandaloneJTAManagerLookup; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; @@ -45,23 +63,6 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.cfg.AvailableSettings; -import org.hibernate.cfg.Configuration; -import org.hibernate.cfg.Environment; -import org.hibernate.mapping.Collection; -import org.hibernate.mapping.PersistentClass; -import org.hibernate.service.ServiceRegistry; -import org.hibernate.engine.transaction.jta.platform.internal.JBossStandAloneJtaPlatform; -import org.hibernate.stat.Statistics; -import org.hibernate.test.cache.infinispan.functional.Item; -import org.hibernate.testing.ServiceRegistryBuilder; -import org.hibernate.testing.jta.JtaAwareConnectionProviderImpl; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - /** * This is an example test based on http://community.jboss.org/docs/DOC-14617 that shows how to interact with * Hibernate configured with Infinispan second level cache provider using JTA transactions. 
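The javadoc above summarises what this test drives: Hibernate sessions enlisted in externally managed JTA transactions, with Infinispan as the second-level cache provider. As a rough orientation for readers of this patch, the interaction being exercised has the following shape. This is a minimal sketch, not code from the test itself: ctx, sessionFactory and item stand in for the JNDI context, SessionFactory and mapped entity the test assembles in its setup, and "UserTransaction" is the JNDI name the test binds in setUp() and unbinds in tearDown(); the types used are the ones already imported above.

    // Sketch only: placeholders for objects the real test builds elsewhere.
    void saveWithJta(Context ctx, SessionFactory sessionFactory, Object item) throws Exception {
       UserTransaction ut = (UserTransaction) ctx.lookup("UserTransaction");
       ut.begin();
       try {
          Session session = sessionFactory.openSession();
          session.persist(item);   // item: any mapped entity, e.g. the Item entity used by this test
          session.flush();
          session.close();
          ut.commit();
       } catch (Exception e) {
          // only roll back if the transaction is still live (commit itself may have failed)
          if (ut.getStatus() == Status.STATUS_ACTIVE) ut.rollback();
          throw e;
       }
    }

With the JBossStandAloneJtaPlatform registered in the configuration (see below), the session takes part in the surrounding UserTransaction rather than driving its own JDBC transaction, which is what the assertions in this test rely on.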
@@ -85,7 +86,7 @@ public class JBossStandaloneJtaExampleTest { jndiServer = startJndiServer(); ctx = createJndiContext(); // Inject configuration to initialise transaction manager from config classloader - lookup.init( new org.infinispan.config.Configuration() ); + lookup.init(new ConfigurationBuilder().build()); bindTransactionManager(); bindUserTransaction(); } @@ -93,6 +94,8 @@ public class JBossStandaloneJtaExampleTest { @After public void tearDown() throws Exception { try { + unbind("UserTransaction", ctx); + unbind("java:/TransactionManager", ctx); ctx.close(); jndiServer.stop(); } @@ -250,7 +253,8 @@ public class JBossStandaloneJtaExampleTest { cfg.setProperty(Environment.RELEASE_CONNECTIONS, "auto"); cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true"); cfg.setProperty(Environment.USE_QUERY_CACHE, "true"); - cfg.setProperty(Environment.CACHE_REGION_FACTORY, "org.hibernate.cache.infinispan.InfinispanRegionFactory"); + cfg.setProperty(Environment.CACHE_REGION_FACTORY, + "org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase$TestInfinispanRegionFactory"); Properties envProps = Environment.getProperties(); envProps.put(AvailableSettings.JTA_PLATFORM, new JBossStandAloneJtaPlatform()); diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/XaTransactionImpl.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/XaTransactionImpl.java index 595911076b..5e16a7bd24 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/XaTransactionImpl.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/tm/XaTransactionImpl.java @@ -137,7 +137,8 @@ public class XaTransactionImpl implements Transaction { if (synchronizations != null) { for (int i = 0; i < synchronizations.size(); i++) { Synchronization s = (Synchronization) synchronizations.get(i); - s.afterCompletion(status); + if (s != null) + s.afterCompletion(status); } } diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestSupport.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestSupport.java index 4ab93dc12e..bfc1d85ff4 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestSupport.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestSupport.java @@ -68,7 +68,6 @@ public class CacheTestSupport { } public void setUp() throws Exception { - // Try to ensure we use IPv4; otherwise cluster formation is very slow preferIPv4Stack = System.getProperty(PREFER_IPV4STACK); System.setProperty(PREFER_IPV4STACK, "true"); @@ -78,7 +77,6 @@ public class CacheTestSupport { } public void tearDown() throws Exception { - if (preferIPv4Stack == null) System.clearProperty(PREFER_IPV4STACK); else diff --git a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestUtil.java b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestUtil.java index 500bba828d..5565b43742 100644 --- a/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestUtil.java +++ b/hibernate-infinispan/src/test/java/org/hibernate/test/cache/infinispan/util/CacheTestUtil.java @@ -23,17 +23,8 @@ */ package org.hibernate.test.cache.infinispan.util; -import java.util.Enumeration; -import java.util.HashSet; import java.util.Properties; -import java.util.Set; -import junit.framework.Test; -import 
junit.framework.TestCase; -import junit.framework.TestSuite; - -import org.hibernate.SessionFactory; -import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.infinispan.InfinispanRegionFactory; import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.Configuration; @@ -41,6 +32,7 @@ import org.hibernate.cfg.Environment; import org.hibernate.cfg.Settings; import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.service.ServiceRegistry; +import org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase; /** * Utilities for cache testing. @@ -49,7 +41,8 @@ import org.hibernate.service.ServiceRegistry; */ public class CacheTestUtil { - public static Configuration buildConfiguration(String regionPrefix, Class regionFactory, boolean use2ndLevel, boolean useQueries) { + public static Configuration buildConfiguration(String regionPrefix, + Class regionFactory, boolean use2ndLevel, boolean useQueries) { Configuration cfg = new Configuration(); cfg.setProperty(Environment.GENERATE_STATISTICS, "true"); cfg.setProperty(Environment.USE_STRUCTURED_CACHE, "true"); @@ -63,72 +56,46 @@ public class CacheTestUtil { return cfg; } - public static Configuration buildLocalOnlyConfiguration(String regionPrefix, boolean use2ndLevel, boolean useQueries) { - Configuration cfg = buildConfiguration(regionPrefix, InfinispanRegionFactory.class, use2ndLevel, useQueries); - cfg.setProperty(InfinispanRegionFactory.INFINISPAN_CONFIG_RESOURCE_PROP, - InfinispanRegionFactory.DEF_INFINISPAN_CONFIG_RESOURCE); - return cfg; - } - public static Configuration buildCustomQueryCacheConfiguration(String regionPrefix, String queryCacheName) { Configuration cfg = buildConfiguration(regionPrefix, InfinispanRegionFactory.class, true, true); cfg.setProperty(InfinispanRegionFactory.QUERY_CACHE_RESOURCE_PROP, queryCacheName); return cfg; } + public static InfinispanRegionFactory startRegionFactory(ServiceRegistry reg, + Configuration cfg){ + try { + Settings settings = cfg.buildSettings(reg); + Properties properties = cfg.getProperties(); + String factoryType = cfg.getProperty(Environment.CACHE_REGION_FACTORY); + Class clazz = Thread.currentThread() + .getContextClassLoader().loadClass(factoryType); + InfinispanRegionFactory regionFactory; + if (clazz == InfinispanRegionFactory.class) { + regionFactory = new SingleNodeTestCase.TestInfinispanRegionFactory(); + } else { + regionFactory = (InfinispanRegionFactory) clazz.newInstance(); + } + regionFactory.start(settings, properties); + return regionFactory; + } catch (Exception e) { + throw new RuntimeException(e); + } + } - public static InfinispanRegionFactory startRegionFactory( - ServiceRegistry serviceRegistry, - Configuration cfg, - CacheTestSupport testSupport) throws ClassNotFoundException, InstantiationException, IllegalAccessException { - - SessionFactoryImplementor sessionFactory =(SessionFactoryImplementor) cfg.buildSessionFactory( serviceRegistry ); - InfinispanRegionFactory factory = (InfinispanRegionFactory) sessionFactory.getServiceRegistry().getService( RegionFactory.class ); + public static InfinispanRegionFactory startRegionFactory(ServiceRegistry reg, + Configuration cfg, CacheTestSupport testSupport) { + SessionFactoryImplementor sessionFactory =(SessionFactoryImplementor) cfg.buildSessionFactory( reg ); + InfinispanRegionFactory factory = startRegionFactory(reg, cfg); testSupport.registerFactory(factory, sessionFactory); return factory; } - public static void stopRegionFactory(InfinispanRegionFactory factory, 
CacheTestSupport testSupport) { - testSupport.unregisterFactory(factory).close(); - } - - /** - * Supports easy creation of a TestSuite where a subclass' "FailureExpected" version of a base - * test is included in the suite, while the base test is excluded. E.g. test class FooTestCase - * includes method testBar(), while test class SubFooTestCase extends FooTestCase includes method - * testBarFailureExcluded(). Passing SubFooTestCase.class to this method will return a suite that - * does not include testBar(). - * - * FIXME Move this to UnitTestCase - */ - public static TestSuite createFailureExpectedSuite(Class testClass) { - - TestSuite allTests = new TestSuite(testClass); - Set failureExpected = new HashSet(); - Enumeration tests = allTests.tests(); - while (tests.hasMoreElements()) { - Test t = (Test) tests.nextElement(); - if (t instanceof TestCase) { - String name = ((TestCase) t).getName(); - if (name.endsWith("FailureExpected")) - failureExpected.add(name); - } - } - - TestSuite result = new TestSuite(); - tests = allTests.tests(); - while (tests.hasMoreElements()) { - Test t = (Test) tests.nextElement(); - if (t instanceof TestCase) { - String name = ((TestCase) t).getName(); - if (!failureExpected.contains(name + "FailureExpected")) { - result.addTest(t); - } - } - } - - return result; + public static void stopRegionFactory(InfinispanRegionFactory factory, + CacheTestSupport testSupport) { + factory.stop(); + testSupport.unregisterFactory(factory); } /** diff --git a/hibernate-infinispan/src/test/resources/2lc-test-tcp.xml b/hibernate-infinispan/src/test/resources/2lc-test-tcp.xml new file mode 100644 index 0000000000..8c0c9e9b8b --- /dev/null +++ b/hibernate-infinispan/src/test/resources/2lc-test-tcp.xml @@ -0,0 +1,90 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/hibernate-infinispan/src/test/resources/log4j.properties b/hibernate-infinispan/src/test/resources/log4j.properties index 42093273f6..11d012584d 100755 --- a/hibernate-infinispan/src/test/resources/log4j.properties +++ b/hibernate-infinispan/src/test/resources/log4j.properties @@ -9,4 +9,4 @@ log4j.logger.org.hibernate.test=info log4j.logger.org.hibernate.cache=info # SQL Logging - HHH-6833 -log4j.logger.org.hibernate.SQL=debug \ No newline at end of file +log4j.logger.org.hibernate.SQL=warn \ No newline at end of file diff --git a/hibernate-infinispan/src/test/resources/stress-local-infinispan.xml b/hibernate-infinispan/src/test/resources/stress-local-infinispan.xml new file mode 100644 index 0000000000..def1c2a502 --- /dev/null +++ b/hibernate-infinispan/src/test/resources/stress-local-infinispan.xml @@ -0,0 +1,70 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/hibernate-testing/src/main/java/org/hibernate/testing/cache/CachingRegionFactory.java b/hibernate-testing/src/main/java/org/hibernate/testing/cache/CachingRegionFactory.java index b6453dc475..71d438e750 100644 --- a/hibernate-testing/src/main/java/org/hibernate/testing/cache/CachingRegionFactory.java +++ b/hibernate-testing/src/main/java/org/hibernate/testing/cache/CachingRegionFactory.java @@ -45,6 +45,11 @@ public class CachingRegionFactory extends AbstractRegionFactory { private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, CachingRegionFactory.class.getName() ); + + public static String DEFAULT_ACCESSTYPE = "DefaultAccessType"; + + private Properties properties; + public CachingRegionFactory() { LOG.warn( "CachingRegionFactory 
should be only used for testing." ); } @@ -52,6 +57,7 @@ public class CachingRegionFactory extends AbstractRegionFactory { public CachingRegionFactory(Properties properties) { //add here to avoid run into catch LOG.warn( "CachingRegionFactory should be only used for testing." ); + this.properties=properties; } @@ -71,7 +77,10 @@ public class CachingRegionFactory extends AbstractRegionFactory { @Override public AccessType getDefaultAccessType() { - return AccessType.NONSTRICT_READ_WRITE; + if (properties != null && properties.get(DEFAULT_ACCESSTYPE) != null) { + return AccessType.fromExternalName(properties.getProperty(DEFAULT_ACCESSTYPE)); + } + return AccessType.READ_WRITE; } @Override diff --git a/hibernate-testing/src/main/java/org/hibernate/testing/cache/NonstrictReadWriteNaturalIdRegionAccessStrategy.java b/hibernate-testing/src/main/java/org/hibernate/testing/cache/NonstrictReadWriteNaturalIdRegionAccessStrategy.java index 381971908c..3de694f7d6 100644 --- a/hibernate-testing/src/main/java/org/hibernate/testing/cache/NonstrictReadWriteNaturalIdRegionAccessStrategy.java +++ b/hibernate-testing/src/main/java/org/hibernate/testing/cache/NonstrictReadWriteNaturalIdRegionAccessStrategy.java @@ -42,4 +42,31 @@ class NonstrictReadWriteNaturalIdRegionAccessStrategy extends BaseNaturalIdRegio public void remove(Object key) throws CacheException { evict( key ); } + + /** + * Returns false since this is an asynchronous cache access strategy. + * @see org.hibernate.cache.ehcache.internal.strategy.NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy + */ + @Override + public boolean insert(Object key, Object value ) throws CacheException { + return false; + } + + /** + * Returns false since this is a non-strict read/write cache access strategy + * @see org.hibernate.cache.ehcache.internal.strategy.NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy + */ + @Override + public boolean afterInsert(Object key, Object value ) throws CacheException { + return false; + } + + /** + * Removes the entry since this is a non-strict read/write cache strategy. + * @see org.hibernate.cache.ehcache.internal.strategy.NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy + */ + public boolean update(Object key, Object value ) throws CacheException { + remove( key ); + return false; + } } diff --git a/hibernate-testing/src/main/java/org/hibernate/testing/junit4/BaseCoreFunctionalTestCase.java b/hibernate-testing/src/main/java/org/hibernate/testing/junit4/BaseCoreFunctionalTestCase.java index 648cbf82ce..574e954b24 100644 --- a/hibernate-testing/src/main/java/org/hibernate/testing/junit4/BaseCoreFunctionalTestCase.java +++ b/hibernate-testing/src/main/java/org/hibernate/testing/junit4/BaseCoreFunctionalTestCase.java @@ -502,10 +502,8 @@ public abstract class BaseCoreFunctionalTestCase extends BaseUnitTestCase { sessionFactory.getCache().evictNaturalIdRegions(); } } - - protected boolean isCleanupTestDataRequired() { - return false; - } + + protected boolean isCleanupTestDataRequired() { return false; } protected void cleanupTestData() throws Exception { Session s = openSession(); diff --git a/libraries.gradle b/libraries.gradle index 16a3f58ab1..d57ca176ad 100644 --- a/libraries.gradle +++ b/libraries.gradle @@ -31,7 +31,7 @@ ext { junitVersion = '4.10' h2Version = '1.2.145' bytemanVersion = '1.5.2' - infinispanVersion = '5.1.6.FINAL' + infinispanVersion = '5.2.0.Beta3' jnpVersion = '5.0.6.CR1' libraries = [