merge master

commit cda5d8e959

@@ -35,6 +35,7 @@ Do yo thing!
 up the related commits and display them on the Jira issue.
 * Make sure you have added the necessary tests for your changes.
 * Run _all_ the tests to assure nothing else was accidentally broken.
+* Make sure your source does not violate the checkstyles.
 
 _Prior to commiting, if you want to pull in the latest upstream changes (highly
 appreciated btw), please use rebasing rather than merging. Merging creates
@@ -71,6 +71,9 @@ subprojects { subProject ->
 
 	group = 'org.hibernate'
 	version = rootProject.hibernateTargetVersion
+	// The OSGi manifest exported package versions need to be only the numerical release -- no "SNAPSHOT" or "Final"
+	exportPackageVersion = version.replaceAll("-SNAPSHOT", "");
+	exportPackageVersion = exportPackageVersion.replaceAll(".Final", "");
 
 	// minimize changes, at least for now (gradle uses 'build' by default)..
 	buildDir = "target"

@@ -229,7 +232,7 @@ subprojects { subProject ->
 				privatePackages.add( packageName );
 			}
 			else {
-				exportPackages.add( packageName );
+				exportPackages.add( packageName + ";version=\"" + exportPackageVersion + "\"" );
 			}
 		}
 	}
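The export-package version derivation above is plain string manipulation; here is a minimal Java sketch of the same transformation (the class and method names are illustrative, not part of the build):

import java.util.Arrays;
import java.util.List;

public class ExportPackageVersion {

	// Mirrors the Gradle logic above: strip "-SNAPSHOT" and ".Final" from the
	// project version so only the numerical release remains.
	static String derive(String projectVersion) {
		return projectVersion
				.replaceAll( "-SNAPSHOT", "" )
				.replaceAll( "\\.Final", "" ); // the Gradle snippet writes ".Final"; the dot is escaped here for strictness
	}

	public static void main(String[] args) {
		List<String> samples = Arrays.asList( "4.3.0-SNAPSHOT", "4.3.0.Final" );
		for ( String sample : samples ) {
			System.out.println( sample + " -> " + derive( sample ) ); // both print 4.3.0
		}
	}
}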
@@ -5,6 +5,53 @@ match the actual issue resolution (i.e. a bug might not be a bug). Please
 refer to the particular case on JIRA using the issue tracking number to learn
 more about each case.
 
+Changes in version 4.3.0.Beta3 (2013.05.29)
+------------------------------------------------------------------------------------------------------------------------
+https://hibernate.atlassian.net/browse/HHH/fixforversion/13451
+
+** Sub-task
+    * [HHH-8144] - Create a 'release' task that performs all tasks needed for doing a release
+
+** Bug
+    * [HHH-2664] - full join not working
+    * [HHH-5465] - HQL left join fetch of an element collection following a left join fetch of a one-to-one relationship causes NullPointerException
+    * [HHH-6813] - @Id @OneToOne cause NullPointerException during query
+    * [HHH-8083] - @OrderColumn not updated on @OneToMany cascade
+    * [HHH-8219] - Protect against JDK 7 API usage
+    * [HHH-8220] - pom dependencies scope changed from compile to runtime
+    * [HHH-8225] - EMF cannot be created, closed, then re-created in OSGi
+    * [HHH-8233] - exclude-unlisted-classes is not excluding unlisted classes in root
+    * [HHH-8236] - Update to hibernate-commons-annotations 4.0.2.Final
+    * [HHH-8241] - Reusing of scanner instance is impossible with Apache Aries JPA
+    * [HHH-8250] - DefaultInitializeCollectionEventListener not finding collections in cache
+    * [HHH-8254] - throw HibernateException when transaction is rolledback by a reaper thread
+    * [HHH-8266] - Binding of named-stored-procedure XML element tries to create duplicate
+    * [HHH-8269] - DenormalizedTable FK constraint names can be too long
+    * [HHH-8270] - Support for accessing JPA schema export script files specified by URL
+    * [HHH-8271] - Handling of javax.persistence.sql-load-script-source
+    * [HHH-8273] - Incorrect "unique-key" naming comment in docs
+
+** Improvement
+    * [HHH-6875] - @OrderBy on @ElementCollection of basic type should "order by value"
+    * [HHH-7214] - DiscriminatorValue
+    * [HHH-7582] - TableGenerator does not distinguish between different tenants (MultiTenant Schema based)
+    * [HHH-8211] - Checkstyle and FindBugs fix-ups
+    * [HHH-8217] - Make generated constraint names short and non-random
+    * [HHH-8226] - table synonyms cannot find columns on Oracle
+    * [HHH-8231] - Pass along IOException as cause when trying to open script outputs
+    * [HHH-8238] - OsgiJtaPlatform null pointer exception
+    * [HHH-8257] - More concisely obtaining a JBoss logger
+
+** Task
+    * [HHH-8218] - Update to final versions of BV 1.1 and HV 5
+    * [HHH-8222] - Implement @NamedStoredProcedureQuery binding
+    * [HHH-8223] - Implement @NamedEntityGraph binding
+    * [HHH-8232] - Upgrade to Gradle 1.6
+    * [HHH-8235] - Drop database profiles upstream
+    * [HHH-8246] - Implement XML binding of NamedStoredProcedureQuery
+    * [HHH-8247] - Implement XML binding of NamedEntityGraph
+
+
 Changes in version 4.3.0.Beta2 (2013.05.02)
 ------------------------------------------------------------------------------------------------------------------------
 https://hibernate.atlassian.net/browse/HHH/fixforversion/13052
@@ -50,6 +50,12 @@
             <surname>Badner</surname>
         </personname>
     </author>
+    <author>
+        <personname>
+            <firstname>Brett</firstname>
+            <surname>Meyer</surname>
+        </personname>
+    </author>
     <othercredit>
         <personname>
@@ -522,9 +522,8 @@
             The <option>not-null</option> and <option>UNIQUE</option> attributes generate constraints on table columns.
          </para>
          <para>
-            The unique-key attribute groups columns in a single, unique key constraint. Currently, the specified value
-            of the unique-key attribute does not name the constraint in the generated DDL. It only groups the columns in
-            the mapping file.
+            The unique-key attribute groups columns in a single, unique key constraint. The attribute overrides
+            the name of any generated unique key constraint.
          </para>
          <programlisting language="XML" role="XML"><xi:include href="extras/notnull-unique.xml" xmlns:xi="http://www.w3.org/2001/XInclude" parse="text" /></programlisting>
       </step>
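For mappings done with annotations rather than hbm.xml, the same grouping of columns into a single named unique key constraint can be expressed with standard JPA annotations; a minimal sketch (the Employee entity and its columns are illustrative, not taken from the documentation):

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;

// Groups orgId and employeeId into one named unique constraint, analogous to
// unique-key="OrgEmployeeId" on the corresponding XML mapping columns.
@Entity
@Table(name = "Employee",
		uniqueConstraints = @UniqueConstraint(name = "OrgEmployeeId",
				columnNames = { "orgId", "employeeId" }))
public class Employee {
	@Id
	private long id;

	@Column(name = "orgId")
	private long orgId;

	@Column(name = "employeeId")
	private long employeeId;
}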
@@ -380,8 +380,12 @@
         <para>
             If you want to audit a relation, where the target entity is not audited (that is the case for example with
             dictionary-like entities, which don't change and don't have to be audited), just annotate it with
-            <literal>@Audited(targetAuditMode = RelationTargetAuditMode.NOT_AUDITED)</literal>. Then, when reading historic
-            versions of your entity, the relation will always point to the "current" related entity.
+            <literal>@Audited(targetAuditMode = RelationTargetAuditMode.NOT_AUDITED)</literal>. Then, while reading historic
+            versions of your entity, the relation will always point to the "current" related entity. By default, Envers
+            throws a <classname>javax.persistence.EntityNotFoundException</classname> when the "current" entity does not
+            exist in the database. Apply the <literal>@NotFound(action = NotFoundAction.IGNORE)</literal> annotation
+            to silence the exception and assign a null value instead. Note that this solution causes implicit eager loading
+            of to-one relations.
         </para>
 
         <para>
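A minimal entity sketch combining the annotations described in this change (the Ticket and Currency classes are illustrative):

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;

import org.hibernate.annotations.NotFound;
import org.hibernate.annotations.NotFoundAction;
import org.hibernate.envers.Audited;
import org.hibernate.envers.RelationTargetAuditMode;

@Entity
@Audited
public class Ticket {
	@Id
	private long id;

	// The Currency dictionary entity itself is not audited; historic Ticket revisions
	// always point at the "current" Currency row. If that row has been deleted,
	// @NotFound(IGNORE) yields null instead of EntityNotFoundException (and forces
	// eager loading of this to-one relation).
	@Audited(targetAuditMode = RelationTargetAuditMode.NOT_AUDITED)
	@ManyToOne
	@NotFound(action = NotFoundAction.IGNORE)
	private Currency currency;
}

@Entity
class Currency {
	@Id
	private long id;
}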
@@ -52,6 +52,7 @@
     <xi:include href="JMX.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
     <xi:include href="Envers.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
     <xi:include href="chapters/multitenancy/Multi_Tenancy.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
+    <xi:include href="chapters/osgi/OSGi.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
     <xi:include href="appendix-Configuration_Properties.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
     <xi:include href="appendices/legacy_criteria/Legacy_Criteria.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
 </book>
@@ -0,0 +1,422 @@
<?xml version='1.0' encoding='utf-8'?>

<chapter xmlns="http://docbook.org/ns/docbook"
         xmlns:xl="http://www.w3.org/1999/xlink"
         xmlns:xi="http://www.w3.org/2001/XInclude">
    <info>
        <title>OSGi</title>
        <abstract>
            <para>
                The Open Services Gateway initiative (OSGi) specification describes a dynamic, modularized system. "Bundles"
                (components) can be installed, activated, deactivated, and uninstalled during runtime, without requiring
                a system restart. OSGi frameworks manage bundles' dependencies, packages, and classes. The framework
                is also in charge of ClassLoading, managing visibility of packages between bundles. Further, service
                registry and discovery are provided through a "whiteboard" pattern.
            </para>

            <para>
                OSGi environments present numerous, unique challenges. Most notably, the dynamic nature of available
                bundles during runtime can require significant architectural considerations. Also,
                architectures must allow the OSGi-specific ClassLoading and service registration/discovery.
            </para>
        </abstract>
    </info>

    <section>
        <title>OSGi Specification and Environment</title>

        <para>
            Hibernate targets the OSGi 4.3 spec or later. It was necessary to start with 4.3, over 4.2, due to our
            dependency on OSGi's <literal>BundleWiring</literal> for entity/mapping scanning.
        </para>

        <para>
            Hibernate supports three types of configurations within OSGi.
            <orderedlist>
                <listitem>
                    <firstterm>Container-Managed JPA</firstterm>: <xref linkend="osgi-managed-jpa"/>
                </listitem>
                <listitem>
                    <firstterm>Unmanaged JPA</firstterm>: <xref linkend="osgi-unmanaged-jpa"/>
                </listitem>
                <listitem>
                    <firstterm>Unmanaged Native</firstterm>: <xref linkend="osgi-unmanaged-native"/>
                </listitem>
            </orderedlist>
        </para>
    </section>

    <section>
        <title>hibernate-osgi</title>

        <para>
            Rather than embed OSGi capabilities into hibernate-core, hibernate-entitymanager, and sub-modules,
            hibernate-osgi was created. It's purposefully separated, isolating all OSGi dependencies. It provides an
            OSGi-specific ClassLoader (aggregates the container's CL with core and entitymanager CLs), JPA persistence
            provider, SF/EMF bootstrapping, entities/mappings scanner, and service management.
        </para>
    </section>

    <section xml:id="osgi-managed-jpa">
        <title>Container-Managed JPA</title>

        <para>
            The Enterprise OSGi specification includes container-managed JPA. The container is responsible for
            discovering persistence units and creating the <literal>EntityManagerFactory</literal> (one EMF per PU).
            It uses the JPA provider (hibernate-osgi) that has registered itself with the OSGi
            <literal>PersistenceProvider</literal> service.
        </para>

        <para>
            Quickstart tutorial project, demonstrating a container-managed JPA client bundle:
            <link xl:href="https://github.com/hibernate/hibernate-orm/tree/master/documentation/src/main/docbook/quickstart/tutorials/osgi/managed-jpa">managed-jpa</link>
        </para>

        <section>
            <title>Client bundle imports</title>
            <para>
                Your client bundle's manifest will need to import, at a minimum,
                <itemizedlist>
                    <listitem>
                        <literal>javax.persistence</literal>
                    </listitem>
                    <listitem>
                        <para>
                            <literal>org.hibernate.proxy</literal> and <literal>javassist.util.proxy</literal>, due to
                            Hibernate's ability to return proxies for lazy initialization (Javassist enhancement
                            occurs on the entity's ClassLoader during runtime).
                        </para>
                    </listitem>
                </itemizedlist>
            </para>
        </section>

        <section>
            <title>JPA 2.1</title>

            <para>
                No Enterprise OSGi JPA container currently supports JPA 2.1 (the spec is not yet released). For
                testing, the managed-jpa example makes use of
                <link xl:href="https://github.com/brmeyer/aries/tree/jpa21">Brett's fork of Aries</link>. To work
                with Hibernate 4.3, clone the fork and build Aries JPA.
            </para>
        </section>

        <section>
            <title>DataSource</title>
            <para>
                Typical Enterprise OSGi JPA usage includes a DataSource installed in the container. The client
                bundle's <literal>persistence.xml</literal> uses the DataSource through JNDI. For an example,
                see the QuickStart's DataSource:
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/datasource-h2.xml">datasource-h2.xml</link>.
                The DataSource is then called out in
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/managed-jpa/src/main/resources/META-INF/persistence.xml">
                persistence.xml's</link> <literal>jta-data-source</literal>.
            </para>
        </section>

        <section>
            <title>Bundle Ordering</title>
            <para>
                Hibernate currently requires fairly specific bundle activation ordering. See the managed-jpa
                QuickStart's
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/managed-jpa/features.xml">features.xml</link>
                for the best supported sequence.
            </para>
        </section>

        <section>
            <title>Obtaining an EntityManager</title>
            <para>
                The easiest, and most supported, method of obtaining an <literal>EntityManager</literal> utilizes OSGi's
                <literal>blueprint.xml</literal>. The container takes the name of your persistence unit, then injects
                an <literal>EntityManager</literal> instance into your given bean attribute. See the
                <literal>dpService</literal> bean in the managed-jpa QuickStart's
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/managed-jpa/src/main/resources/OSGI-INF/blueprint/blueprint.xml">blueprint.xml</link>
                for an example.
            </para>
        </section>
    </section>

    <section xml:id="osgi-unmanaged-jpa">
        <title>Unmanaged JPA</title>

        <para>
            Hibernate also supports the use of JPA through hibernate-entitymanager, unmanaged by the OSGi
            container. The client bundle is responsible for managing the EntityManagerFactory and EntityManagers.
        </para>

        <para>
            Quickstart tutorial project, demonstrating an unmanaged JPA client bundle:
            <link xl:href="https://github.com/hibernate/hibernate-orm/tree/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-jpa">unmanaged-jpa</link>
        </para>

        <section>
            <title>Client bundle imports</title>
            <para>
                Your client bundle's manifest will need to import, at a minimum,
                <itemizedlist>
                    <listitem>
                        <literal>javax.persistence</literal>
                    </listitem>
                    <listitem>
                        <para>
                            <literal>org.hibernate.proxy</literal> and <literal>javassist.util.proxy</literal>, due to
                            Hibernate's ability to return proxies for lazy initialization (Javassist enhancement
                            occurs on the entity's ClassLoader during runtime)
                        </para>
                    </listitem>
                    <listitem>
                        <para>
                            JDBC driver package (example: <literal>org.h2</literal>)
                        </para>
                    </listitem>
                    <listitem>
                        <para>
                            <literal>org.osgi.framework</literal>, necessary to discover the EMF (described below)
                        </para>
                    </listitem>
                </itemizedlist>
            </para>
        </section>

        <section>
            <title>Bundle Ordering</title>
            <para>
                Hibernate currently requires fairly specific bundle activation ordering. See the unmanaged-jpa
                QuickStart's
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-jpa/features.xml">features.xml</link>
                for the best supported sequence.
            </para>
        </section>

        <section>
            <title>Obtaining an EntityManagerFactory</title>
            <para>
                hibernate-osgi registers an OSGi service, using the JPA <literal>PersistenceProvider</literal> interface
                name, that bootstraps and creates an <literal>EntityManagerFactory</literal> specific for OSGi
                environments. It is VITAL that your EMF be obtained through the service, rather than creating it
                manually. The service handles the OSGi ClassLoader, discovered extension points, scanning, etc. Manually
                creating an <literal>EntityManagerFactory</literal> is guaranteed to NOT work during runtime!
            </para>
            <para>
                For an example of how to discover and use the service, see the unmanaged-jpa
                QuickStart's
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-jpa/src/main/java/org/hibernate/osgitest/HibernateUtil.java">HibernateUtil.java</link>.
            </para>
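            <para>
                A minimal sketch of that discovery (an illustration, not the QuickStart's
                <literal>HibernateUtil</literal> verbatim; the persistence unit name is assumed):
            </para>
            <programlisting role="JAVA"><![CDATA[import javax.persistence.EntityManagerFactory;
import javax.persistence.spi.PersistenceProvider;

import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.ServiceReference;

public class UnmanagedJpaUtil {

	public static EntityManagerFactory createEntityManagerFactory() {
		// Use the client bundle's own context to look up the PersistenceProvider
		// service registered by hibernate-osgi.
		BundleContext context = FrameworkUtil.getBundle( UnmanagedJpaUtil.class ).getBundleContext();
		ServiceReference serviceReference = context.getServiceReference( PersistenceProvider.class.getName() );
		PersistenceProvider provider = (PersistenceProvider) context.getService( serviceReference );
		// "unmanaged-jpa" is an assumed persistence unit name from the client bundle's persistence.xml.
		return provider.createEntityManagerFactory( "unmanaged-jpa", null );
	}
}]]></programlisting>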
        </section>
    </section>

    <section xml:id="osgi-unmanaged-native">
        <title>Unmanaged Native</title>

        <para>
            Native Hibernate use is also supported. The client bundle is responsible for managing the
            SessionFactory and Sessions.
        </para>

        <para>
            Quickstart tutorial project, demonstrating an unmanaged native client bundle:
            <link xl:href="https://github.com/hibernate/hibernate-orm/tree/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-native">unmanaged-native</link>
        </para>

        <section>
            <title>Client bundle imports</title>
            <para>
                Your client bundle's manifest will need to import, at a minimum,
                <itemizedlist>
                    <listitem>
                        <literal>javax.persistence</literal>
                    </listitem>
                    <listitem>
                        <para>
                            <literal>org.hibernate.proxy</literal> and <literal>javassist.util.proxy</literal>, due to
                            Hibernate's ability to return proxies for lazy initialization (Javassist enhancement
                            occurs on the entity's ClassLoader during runtime)
                        </para>
                    </listitem>
                    <listitem>
                        <para>
                            JDBC driver package (example: <literal>org.h2</literal>)
                        </para>
                    </listitem>
                    <listitem>
                        <para>
                            <literal>org.osgi.framework</literal>, necessary to discover the SF (described below)
                        </para>
                    </listitem>
                    <listitem>
                        <para>
                            <literal>org.hibernate.*</literal> packages, as necessary (ex: cfg, criterion, service, etc.)
                        </para>
                    </listitem>
                </itemizedlist>
            </para>
        </section>

        <section>
            <title>Bundle Ordering</title>
            <para>
                Hibernate currently requires fairly specific bundle activation ordering. See the unmanaged-native
                QuickStart's
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-native/features.xml">features.xml</link>
                for the best supported sequence.
            </para>
        </section>

        <section>
            <title>Obtaining a SessionFactory</title>
            <para>
                hibernate-osgi registers an OSGi service, using the <literal>SessionFactory</literal> interface
                name, that bootstraps and creates a <literal>SessionFactory</literal> specific for OSGi
                environments. It is VITAL that your SF be obtained through the service, rather than creating it
                manually. The service handles the OSGi ClassLoader, discovered extension points, scanning, etc. Manually
                creating a <literal>SessionFactory</literal> is guaranteed to NOT work during runtime!
            </para>
            <para>
                For an example of how to discover and use the service, see the unmanaged-native
                QuickStart's
                <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-native/src/main/java/org/hibernate/osgitest/HibernateUtil.java">HibernateUtil.java</link>.
            </para>
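            <para>
                A minimal sketch of that discovery (an illustration, not the QuickStart's
                <literal>HibernateUtil</literal> verbatim):
            </para>
            <programlisting role="JAVA"><![CDATA[import org.hibernate.Session;
import org.hibernate.SessionFactory;

import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.ServiceReference;

public class UnmanagedNativeUtil {

	public static Session openSession() {
		// Look up the SessionFactory service registered by hibernate-osgi.
		BundleContext context = FrameworkUtil.getBundle( UnmanagedNativeUtil.class ).getBundleContext();
		ServiceReference serviceReference = context.getServiceReference( SessionFactory.class.getName() );
		SessionFactory sessionFactory = (SessionFactory) context.getService( serviceReference );
		return sessionFactory.openSession();
	}
}]]></programlisting>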
        </section>
    </section>

    <section>
        <title>Optional Modules</title>

        <para>
            The <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-native">unmanaged-native</link>
            QuickStart project demonstrates the use of optional Hibernate modules. Each module adds additional
            dependency bundles that must first be activated
            (see <link xl:href="https://github.com/hibernate/hibernate-orm/blob/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-native/features.xml">features.xml</link>).
            As of ORM 4.2, Envers is fully supported. Support for C3P0, Proxool, EhCache, and Infinispan was added in
            4.3; however, none of their 3rd party libraries currently work in OSGi (lots of ClassLoader problems, etc.).
            We're tracking the issues in JIRA.
        </para>
    </section>

    <section>
        <title>Extension Points</title>

        <para>
            Multiple contracts exist to allow applications to integrate with and extend Hibernate capabilities. Most
            apps utilize JDK services to provide their implementations. hibernate-osgi supports the same
            extensions through OSGi services. Implement and register them in any of the three configurations.
            hibernate-osgi will discover and integrate them during EMF/SF bootstrapping. Supported extension points
            are as follows. The specified interface should be used during service registration.

            <itemizedlist>
                <listitem>
                    <literal>org.hibernate.integrator.spi.Integrator</literal> (as of 4.2)
                </listitem>
                <listitem>
                    <literal>org.hibernate.boot.registry.selector.StrategyRegistrationProvider</literal> (as of 4.3)
                </listitem>
                <listitem>
                    <literal>org.hibernate.metamodel.spi.TypeContributor</literal> (as of 4.3)
                </listitem>
                <listitem>
                    JTA's <literal>javax.transaction.TransactionManager</literal> and
                    <literal>javax.transaction.UserTransaction</literal> (as of 4.2), however these are typically
                    provided by the OSGi container.
                </listitem>
            </itemizedlist>
        </para>

        <para>
            The easiest way to register extension point implementations is through a <literal>blueprint.xml</literal>
            file. Add <literal>OSGI-INF/blueprint/blueprint.xml</literal> to your classpath. Envers' blueprint
            is a great example:
        </para>

        <example>
            <title>Example extension point registrations in blueprint.xml</title>
            <programlisting role="XML"><xi:include href="extras/extension_point_blueprint.xml" parse="text"/></programlisting>
        </example>

        <para>
            Extension points can also be registered programmatically with
            <literal>BundleContext#registerService</literal>, typically within your
            <literal>BundleActivator#start</literal>.
        </para>
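        <para>
            A minimal sketch of such programmatic registration, assuming a hypothetical
            <literal>MyIntegrator</literal> implementation of <literal>Integrator</literal> exists in the client bundle:
        </para>
        <programlisting role="JAVA"><![CDATA[import org.hibernate.integrator.spi.Integrator;

import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;

public class ExtensionPointActivator implements BundleActivator {

	@Override
	public void start(BundleContext context) throws Exception {
		// hibernate-osgi discovers this service during EMF/SF bootstrapping.
		// MyIntegrator is a hypothetical client-bundle implementation of Integrator.
		context.registerService( Integrator.class.getName(), new MyIntegrator(), null );
	}

	@Override
	public void stop(BundleContext context) throws Exception {
	}
}]]></programlisting>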
    </section>

    <section>
        <title>Caveats</title>

        <itemizedlist>
            <listitem>
                <para>
                    Technically, multiple persistence units are supported by Enterprise OSGi JPA and unmanaged
                    Hibernate JPA use. However, we cannot currently support this in OSGi. In Hibernate 4, only one
                    instance of the OSGi-specific ClassLoader is used per Hibernate bundle, mainly due to heavy use of
                    static TCCL utilities. We hope to support one OSGi ClassLoader per persistence unit in
                    Hibernate 5.
                </para>
            </listitem>
            <listitem>
                <para>
                    Scanning is supported to find non-explicitly listed entities and mappings. However, they MUST be
                    in the same bundle as your persistence unit (fairly typical anyway). Our OSGi ClassLoader only
                    considers the "requesting bundle" (hence the requirement on using services to create EMF/SF),
                    rather than attempting to scan all available bundles. This is primarily for versioning
                    considerations, collision protection, etc.
                </para>
            </listitem>
            <listitem>
                <para>
                    Some containers (ex: Aries) always return true for
                    <literal>PersistenceUnitInfo#excludeUnlistedClasses</literal>,
                    even if your persistence.xml explicitly has <literal>exclude-unlisted-classes</literal> set
                    to <literal>false</literal>. They claim it's to protect JPA providers from having to implement
                    scanning ("we handle it for you"), even though we still want to support it in many cases. The
                    workaround is to set <literal>hibernate.archive.autodetection</literal> to, for example,
                    <literal>hbm,class</literal>. This tells Hibernate to ignore the excludeUnlistedClasses value and
                    scan for <literal>*.hbm.xml</literal> and entities regardless (see the sketch after this list).
                </para>
            </listitem>
            <listitem>
                <para>
                    Scanning does not currently support annotated packages on <literal>package-info.java</literal>.
                </para>
            </listitem>
            <listitem>
                <para>
                    Currently, Hibernate OSGi is primarily tested using Apache Karaf and Apache Aries JPA. Additional
                    testing is needed with Equinox, Gemini, and other container providers.
                </para>
            </listitem>
            <listitem>
                <para>
                    Hibernate ORM has many dependencies that do not currently provide OSGi manifests.
                    The QuickStart tutorials make heavy use of 3rd party bundles (SpringSource, ServiceMix) or the
                    <literal>wrap:...</literal> operator.
                </para>
            </listitem>
            <listitem>
                <para>
                    As previously mentioned, bundle activation is currently order specific. See the QuickStart
                    tutorials' <literal>features.xml</literal> for example sequences.
                </para>
            </listitem>
            <listitem>
                <para>
                    The environment should be considered STATIC. Hibernate currently does not support the ability
                    to dynamically add and remove client bundles during runtime. Doing so is typically
                    catastrophic. We hope to better support at least partially-dynamic environments in
                    Hibernate 5.
                </para>
            </listitem>
            <listitem>
                <para>
                    No Enterprise OSGi JPA container currently supports JPA 2.1 (the spec is not yet released). For
                    testing, the managed-jpa example makes use of
                    <link xl:href="https://github.com/brmeyer/aries/tree/jpa21">Brett's fork of Aries</link>. To work
                    with Hibernate 4.3, clone the fork and build Aries JPA.
                </para>
            </listitem>
        </itemizedlist>
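        <para>
            A minimal sketch of the <literal>hibernate.archive.autodetection</literal> workaround mentioned above,
            passing the property programmatically (the same property can equally be set in
            <literal>persistence.xml</literal>; the class and method names are illustrative):
        </para>
        <programlisting role="JAVA"><![CDATA[import java.util.HashMap;
import java.util.Map;

import javax.persistence.EntityManagerFactory;
import javax.persistence.spi.PersistenceProvider;

public class AutodetectionWorkaround {

	public static EntityManagerFactory create(PersistenceProvider provider, String unitName) {
		Map<String, Object> properties = new HashMap<String, Object>();
		// Ignore the container's excludeUnlistedClasses value and scan for
		// *.hbm.xml mappings and annotated classes regardless.
		properties.put( "hibernate.archive.autodetection", "hbm,class" );
		return provider.createEntityManagerFactory( unitName, properties );
	}
}]]></programlisting>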
    </section>

</chapter>
@@ -0,0 +1,11 @@
<blueprint default-activation="eager"
           xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">

    <bean id="integrator" class="org.hibernate.envers.event.spi.EnversIntegrator" />
    <service ref="integrator" interface="org.hibernate.integrator.spi.Integrator" />

    <bean id="typeContributor"
          class="org.hibernate.envers.internal.entities.TypeContributorImpl" />
    <service ref="typeContributor" interface="org.hibernate.metamodel.spi.TypeContributor" />

</blueprint>
@@ -95,10 +95,8 @@
 
     <para>
         A <literal>unique-key</literal> attribute can be used to group columns in
-        a single, unique key constraint. Currently, the specified value of the
-        <literal>unique-key</literal> attribute is <emphasis>not</emphasis> used
-        to name the constraint in the generated DDL. It is only used to group the columns in
-        the mapping file.
+        a single, unique key constraint. The attribute overrides
+        the name of any generated unique key constraint.
     </para>
 
     <programlisting role="XML"><![CDATA[<many-to-one name="org" column="orgId" unique-key="OrgEmployeeId"/>
@@ -31,6 +31,12 @@
             <surname>Warski</surname>
         </personname>
     </author>
+    <author>
+        <personname>
+            <firstname>Brett</firstname>
+            <surname>Meyer</surname>
+        </personname>
+    </author>
     <othercredit>
         <personname>
@@ -43,5 +43,6 @@
     <xi:include xmlns:xi="http://www.w3.org/2001/XInclude" href="content/tutorial_annotations.xml" />
     <xi:include xmlns:xi="http://www.w3.org/2001/XInclude" href="content/tutorial_jpa.xml" />
     <xi:include xmlns:xi="http://www.w3.org/2001/XInclude" href="content/tutorial_envers.xml" />
+    <xi:include xmlns:xi="http://www.w3.org/2001/XInclude" href="content/tutorial_osgi.xml" />
 
 </book>
@@ -0,0 +1,104 @@
<?xml version='1.0' encoding='UTF-8' ?>

<chapter xmlns="http://docbook.org/ns/docbook"
         xmlns:xl="http://www.w3.org/1999/xlink"
         xmlns:xi="http://www.w3.org/2001/XInclude">
    <title>OSGi Tutorial</title>

    <para>
        Hibernate targets the OSGi 4.3 spec or later and supports three types
        of configurations.
        <orderedlist>
            <listitem>
                <link xl:href="https://github.com/hibernate/hibernate-orm/tree/master/documentation/src/main/docbook/quickstart/tutorials/osgi/managed-jpa">Container-Managed JPA</link>
            </listitem>
            <listitem>
                <link xl:href="https://github.com/hibernate/hibernate-orm/tree/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-jpa">Unmanaged JPA</link>
            </listitem>
            <listitem>
                <link xl:href="https://github.com/hibernate/hibernate-orm/tree/master/documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-native">Unmanaged Native</link>
            </listitem>
        </orderedlist>
    </para>

    <para>
        For more details about OSGi, the three configurations, hibernate-osgi, extension points, and caveats, please
        see the OSGi chapter of the Developer's Guide!
    </para>

    <section>
        <title>
            Project Overview
        </title>
        <para>
            Each configuration has a QuickStart project located within the download bundle (under <filename>osgi</filename>).
            The bundles can be used as-is within Apache Karaf. Feel free to use them as literal
            "quick start" bundle templates.
        </para>
    </section>

    <section>
        <title>
            Project Structure
        </title>
        <itemizedlist>
            <listitem>
                <filename>osgi/datasource-h2.xml</filename>: Enterprise OSGi JPA usage can include a DataSource installed in the container.
                The client bundle's <literal>persistence.xml</literal> references the DataSource through JNDI. For an
                example, see how managed-jpa's <literal>persistence.xml</literal> calls out the
                <literal>jta-data-source</literal>.
            </listitem>
            <listitem>
                <filename>osgi/[project]/features.xml</filename>: This is arguably the most important "quick start" material. It defines
                a single Karaf feature ("hibernate-test") that demonstrates the necessary 3rd party libraries and
                bundle activation ordering.
            </listitem>
            <listitem>
                <filename>osgi/[project]/pom.xml</filename>: The POM includes typical compile-time dependencies (JPA, OSGi Core,
                OSGi Enterprise), as well as OSGi manifest data.
            </listitem>
            <listitem>
                <filename>osgi/[project]/src/main/resources/OSGI-INF/blueprint/blueprint.xml</filename>:
                The Blueprint includes container-managed EntityManager
                injection (for managed-jpa), as well as demonstrations showing how to register
                your custom implementations of Hibernate extension points.
            </listitem>
            <listitem>
                <filename>osgi/[project]/src/main/resources/META-INF/persistence.xml</filename> or
                <filename>osgi/[project]/src/main/resources/hibernate.cfg.xml</filename>: Note that the configurations
                are no different than typical uses of Hibernate!
            </listitem>
            <listitem>
                <filename>osgi/[project]/src/main/java/org/hibernate/osgitest/HibernateUtil.java</filename>: Demonstrates how to create an
                EntityManagerFactory (JPA) or SessionFactory (Native) using hibernate-osgi's services. Note that in
                managed-jpa, this is replaced by <literal>DataPointServiceImpl#entityManager</literal>, injected by
                <filename>blueprint.xml</filename> (described above).
            </listitem>
        </itemizedlist>
    </section>

    <section xml:id="hibernate-gsg-tutorial-envers-config">
        <title>
            TODOs
        </title>
        <itemizedlist>
            <listitem>
                If using managed-jpa, <filename>features.xml</filename> will need the path to
                <filename>datasource-h2.xml</filename> updated.
            </listitem>
        </itemizedlist>
    </section>

    <section xml:id="hibernate-gsg-tutorial-envers-config">
        <title>
            Karaf Commands
        </title>
        <para>
            All three bundles include Karaf Commands that can be used directly on the Karaf command line to test
            basic persistence operations ("dp:add [name]", "dp:getall", "dp:deleteall", etc.). I leave them in the
            QuickStarts as a useful sanity check.
        </para>
    </section>

</chapter>
@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
    First install the H2 driver using:
    > install -s mvn:com.h2database/h2/1.3.163

    Then copy this file to the deploy folder
-->
<blueprint xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0">

    <bean id="dataSource" class="org.h2.jdbcx.JdbcDataSource">
        <property name="URL" value="jdbc:h2:mem:db1;DB_CLOSE_DELAY=-1;MVCC=TRUE"/>
        <property name="user" value="sa"/>
        <property name="password" value=""/>
    </bean>

    <service interface="javax.sql.DataSource" ref="dataSource">
        <service-properties>
            <entry key="osgi.jndi.service.name" value="jdbc/h2ds"/>
        </service-properties>
    </service>
</blueprint>
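For completeness, a minimal Java sketch (illustrative class name; assumes Aries JNDI is active, as in the QuickStart features) of looking up the DataSource published above by the same JNDI name the managed-jpa persistence.xml references:

import javax.naming.InitialContext;
import javax.sql.DataSource;

public class DataSourceLookup {

	public static DataSource lookup() throws Exception {
		// Same osgi:service JNDI name that persistence.xml uses as its jta-data-source.
		InitialContext context = new InitialContext();
		return (DataSource) context.lookup(
				"osgi:service/javax.sql.DataSource/(osgi.jndi.service.name=jdbc/h2ds)" );
	}
}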
documentation/src/main/docbook/quickstart/tutorials/osgi/managed-jpa/.gitignore

@@ -0,0 +1 @@
/target
@@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8"?>
<features>
    <feature name="hibernate-test">

        <feature>karaf-framework</feature>

        <!-- JTA -->
        <config name="org.apache.aries.transaction">
            aries.transaction.recoverable = true
            aries.transaction.timeout = 600
            aries.transaction.howl.maxLogFiles = 2
            aries.transaction.howl.maxBlocksPerFile = 512
            aries.transaction.howl.bufferSizeKBytes = 4
        </config>
        <bundle start-level="30">mvn:org.apache.geronimo.specs/geronimo-jta_1.1_spec/1.1.1</bundle>
        <bundle start-level="30">mvn:org.apache.aries.transaction/org.apache.aries.transaction.blueprint/1.0.0</bundle>
        <bundle start-level="30">mvn:org.apache.aries.transaction/org.apache.aries.transaction.manager/1.0.1</bundle>

        <!-- JPA -->
        <bundle start-level="30">mvn:org.hibernate.javax.persistence/hibernate-jpa-2.1-api/1.0.0-SNAPSHOT</bundle>
        <!-- No container currently supports JPA 2.1. Clone and build Aries JPA from the following fork (upgrades to
             JPA 2.1). Aries should be upgrading as soon as the spec is out.
             https://github.com/brmeyer/aries/tree/jpa21 -->
        <bundle start-level="30">mvn:org.apache.aries/org.apache.aries.util/1.0.0</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jpa/org.apache.aries.jpa.api/1.0.1-SNAPSHOT</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jpa/org.apache.aries.jpa.blueprint.aries/1.0.2-SNAPSHOT</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jpa/org.apache.aries.jpa.container/1.0.1-SNAPSHOT</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jpa/org.apache.aries.jpa.container.context/1.0.2-SNAPSHOT</bundle>

        <!-- JNDI -->
        <bundle start-level="30">mvn:org.apache.aries.jndi/org.apache.aries.jndi.api/1.0.0</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jndi/org.apache.aries.jndi.core/1.0.0</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jndi/org.apache.aries.jndi.rmi/1.0.0</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jndi/org.apache.aries.jndi.url/1.0.0</bundle>
        <bundle start-level="30">mvn:org.apache.aries.jndi/org.apache.aries.jndi.legacy.support/1.0.0</bundle>

        <!-- Taken from Karaf-Tutorial -->
        <bundle>mvn:commons-collections/commons-collections/3.2.1</bundle>
        <bundle>mvn:commons-pool/commons-pool/1.5.4</bundle>
        <bundle>mvn:commons-dbcp/commons-dbcp/1.4</bundle>
        <bundle>mvn:commons-lang/commons-lang/2.6</bundle>
        <bundle>wrap:mvn:net.sourceforge.serp/serp/1.13.1</bundle>

        <bundle>mvn:com.h2database/h2/1.3.170</bundle>
        <bundle>blueprint:file:/[PATH]/datasource-h2.xml</bundle>

        <!-- These do not natively support OSGi, so using 3rd party bundles. -->
        <bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.antlr/2.7.7_5</bundle>
        <bundle>mvn:org.jboss.javassist/com.springsource.javassist/3.15.0.GA</bundle>
        <bundle>mvn:org.apache.servicemix.specs/org.apache.servicemix.specs.jsr303-api-1.0.0/2.2.0</bundle>
        <bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.ant/1.8.2_2</bundle>
        <bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.dom4j/1.6.1_5</bundle>

        <!-- These do not natively support OSGi, so wrap with BND. -->
        <bundle>wrap:mvn:org.jboss/jandex/1.1.0.Alpha1</bundle>

        <bundle>wrap:mvn:org.hibernate.common/hibernate-commons-annotations/4.0.2.Final</bundle>
        <bundle>mvn:com.fasterxml/classmate/0.5.4</bundle>
        <bundle>mvn:org.jboss.logging/jboss-logging/3.1.0.GA</bundle>

        <bundle>mvn:org.hibernate/hibernate-core/4.3.0-SNAPSHOT</bundle>
        <bundle>mvn:org.hibernate/hibernate-entitymanager/4.3.0-SNAPSHOT</bundle>

        <!-- TODO: It seems that the persistence unit bundle needs to be started
             before hibernate-osgi. When the BundleActivator is started,
             the persistence unit is provided even though managed-jpa
             hasn't completely started yet. If that happens, you'll get an "illegal
             bundle state" exception. Is there a way for the activator to
             watch for bundles with PUs before registering the persistence provider? -->
        <bundle>mvn:org.hibernate.osgi/managed-jpa/1.0.0</bundle>

        <bundle>mvn:org.hibernate/hibernate-osgi/4.3.0-SNAPSHOT</bundle>
    </feature>
</features>
@@ -0,0 +1,64 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.hibernate.osgi</groupId>
    <artifactId>managed-jpa</artifactId>
    <version>1.0.0</version>
    <packaging>bundle</packaging>

    <dependencies>
        <dependency>
            <groupId>org.hibernate.javax.persistence</groupId>
            <artifactId>hibernate-jpa-2.1-api</artifactId>
            <version>1.0.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>org.osgi</groupId>
            <artifactId>org.osgi.core</artifactId>
            <version>4.3.1</version>
        </dependency>
        <dependency>
            <groupId>org.osgi</groupId>
            <artifactId>org.osgi.enterprise</artifactId>
            <version>4.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.karaf.shell</groupId>
            <artifactId>org.apache.karaf.shell.console</artifactId>
            <version>2.3.0</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.felix</groupId>
                <artifactId>maven-bundle-plugin</artifactId>
                <extensions>true</extensions>
                <configuration>
                    <instructions>
                        <Bundle-SymbolicName>org.hibernate.osgi.managed-jpa</Bundle-SymbolicName>
                        <Bundle-Name>managed-jpa</Bundle-Name>
                        <Bundle-Version>1.0.0</Bundle-Version>
                        <Export-Package>
                            org.hibernate.osgitest,
                            org.hibernate.osgitest.entity
                        </Export-Package>
                        <Import-Package>
                            org.apache.felix.service.command,
                            org.apache.felix.gogo.commands,
                            org.apache.karaf.shell.console,
                            org.apache.karaf.shell.commands,
                            javax.persistence;version="[1.0.0,2.1.0]",
                            <!-- Needed for proxying's Javassist enhancement during runtime -->
                            org.hibernate.proxy,
                            javassist.util.proxy,
                            *
                        </Import-Package>
                        <Meta-Persistence>META-INF/persistence.xml</Meta-Persistence>
                    </instructions>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
@@ -0,0 +1,37 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * JBoss, Home of Professional Open Source
 * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @authors tag. All rights reserved.
 * See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * This copyrighted material is made available to anyone wishing to use,
 * modify, copy, or redistribute it subject to the terms and conditions
 * of the GNU Lesser General Public License, v. 2.1.
 * This program is distributed in the hope that it will be useful, but WITHOUT A
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
 * You should have received a copy of the GNU Lesser General Public License,
 * v.2.1 along with this distribution; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */
package org.hibernate.osgitest;

import java.util.List;

import org.hibernate.osgitest.entity.DataPoint;

/**
 * @author Brett Meyer
 */
public interface DataPointService {

	public void add(DataPoint dp);

	public List<DataPoint> getAll();

	public void deleteAll();
}
@@ -0,0 +1,58 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * JBoss, Home of Professional Open Source
 * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @authors tag. All rights reserved.
 * See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * This copyrighted material is made available to anyone wishing to use,
 * modify, copy, or redistribute it subject to the terms and conditions
 * of the GNU Lesser General Public License, v. 2.1.
 * This program is distributed in the hope that it will be useful, but WITHOUT A
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
 * You should have received a copy of the GNU Lesser General Public License,
 * v.2.1 along with this distribution; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */
package org.hibernate.osgitest;

import java.util.List;

import javax.persistence.EntityManager;

import org.hibernate.osgitest.entity.DataPoint;

/**
 * @author Brett Meyer
 */
public class DataPointServiceImpl implements DataPointService {

	private EntityManager entityManager;

	public void add(DataPoint dp) {
		entityManager.persist( dp );
		entityManager.flush();
	}

	public List<DataPoint> getAll() {
		return entityManager.createQuery( "select d from DataPoint d", DataPoint.class ).getResultList();
	}

	public void deleteAll() {
		entityManager.createQuery( "delete from DataPoint" ).executeUpdate();
		entityManager.flush();
	}

	public EntityManager getEntityManager() {
		return entityManager;
	}

	public void setEntityManager(EntityManager entityManager) {
		this.entityManager = entityManager;
	}

}
@@ -0,0 +1,44 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "add")
public class AddCommand implements Action {
	@Argument(index=0, name="Name", required=true, description="Name", multiValued=false)
	String name;

	private DataPointService dpService;

	public void setDpService(DataPointService dpService) {
		this.dpService = dpService;
	}

	public Object execute(CommandSession session) throws Exception {
		DataPoint dp = new DataPoint();
		dp.setName( name );
		dpService.add( dp );
		return null;
	}

}
@@ -0,0 +1,37 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;

@Command(scope = "dp", name = "deleteAll")
public class DeleteAllCommand implements Action {
	private DataPointService dpService;

	public void setDpService(DataPointService dpService) {
		this.dpService = dpService;
	}

	public Object execute(CommandSession session) throws Exception {
		dpService.deleteAll();
		return null;
	}

}
@@ -0,0 +1,43 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hibernate.osgitest.command;

import java.util.List;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "getAll")
public class GetAllCommand implements Action {
	private DataPointService dpService;

	public void setDpService(DataPointService dpService) {
		this.dpService = dpService;
	}

	public Object execute(CommandSession session) throws Exception {
		List<DataPoint> dps = dpService.getAll();
		for (DataPoint dp : dps) {
			System.out.println(dp.getId() + ", " + dp.getName());
		}
		return null;
	}

}
@@ -18,30 +18,36 @@
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */
-package org.hibernate.test.annotations.join;
+package org.hibernate.osgitest.entity;

-import org.hibernate.testing.TestForIssue;
-import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
-import org.junit.Test;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;

 /**
 * @author Brett Meyer
 */
-@TestForIssue( jiraKey = "HHH-2872" )
-public class JoinOrderingTest extends BaseCoreFunctionalTestCase {
-
-@Override
-protected Class<?>[] getAnnotatedClasses() {
-// This is the important piece. ProductDetails must be first to
-// reproduce the issue.
-// return new Class<?>[] { ProductDetails.class, Product.class, ProductVersion.class };
-// TODO: commented out -- @FailureExpected wasn't working on builds
-// if it's a MappingException.
-return new Class<?>[] { };
-}
-
-@Test
-public void testEntityOrderingWithJoins() {
-// nothing to do
-}
-}
+@Entity
+public class DataPoint {
+@Id
+@GeneratedValue
+private long id;
+
+private String name;
+
+public long getId() {
+return id;
+}
+
+public void setId(long id) {
+this.id = id;
+}
+
+public String getName() {
+return name;
+}
+
+public void setName(String name) {
+this.name = name;
+}
+}
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<persistence xmlns="http://java.sun.com/xml/ns/persistence"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
version="1.0">
<persistence-unit name="managed-jpa" transaction-type="JTA">
<jta-data-source>osgi:service/javax.sql.DataSource/(osgi.jndi.service.name=jdbc/h2ds)</jta-data-source>

<properties>
<property name="hibernate.connection.driver_class" value="org.h2.Driver"/>
<property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect"/>
<property name="hibernate.hbm2ddl.auto" value="create-drop"/>
<property name="hibernate.archive.autodetection" value="class"/>
</properties>
</persistence-unit>
</persistence>
@@ -0,0 +1,58 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--

Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

-->
<blueprint default-activation="eager"
xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:jpa="http://aries.apache.org/xmlns/jpa/v1.0.0"
xmlns:tx="http://aries.apache.org/xmlns/transactions/v1.0.0">

<!-- This gets the container-managed EntityManager and injects it into the DataPointServiceImpl bean. -->
<bean id="dpService" class="org.hibernate.osgitest.DataPointServiceImpl">
<jpa:context unitname="managed-jpa" property="entityManager"/>
<tx:transaction method="*" value="Required"/>
</bean>
<service ref="dpService" interface="org.hibernate.osgitest.DataPointService" />

<!-- This demonstrates how to register your custom implementations of Hibernate extension points, such as
Integrator and TypeContributor. -->
<!-- <bean id="integrator" class="your.package.IntegratorImpl"/>
<service ref="integrator" interface="org.hibernate.integrator.spi.Integrator"/>
<bean id="typeContributor" class="your.package.TypeContributorImpl"/>
<service ref="typeContributor" interface="org.hibernate.metamodel.spi.TypeContributor"/> -->

<!-- This bundle makes use of Karaf commands to demonstrate core persistence operations. Feel free to remove it. -->
<command-bundle xmlns="http://karaf.apache.org/xmlns/shell/v1.1.0">
<command name="dp/add">
<action class="org.hibernate.osgitest.command.AddCommand">
<property name="dpService" ref="dpService"/>
</action>
</command>
<command name="dp/getAll">
<action class="org.hibernate.osgitest.command.GetAllCommand">
<property name="dpService" ref="dpService"/>
</action>
</command>
<command name="dp/deleteAll">
<action class="org.hibernate.osgitest.command.DeleteAllCommand">
<property name="dpService" ref="dpService"/>
</action>
</command>
</command-bundle>
</blueprint>
1 documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-jpa/.gitignore vendored Normal file
@@ -0,0 +1 @@
/target
@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8"?>
<features>
<feature name="hibernate-test">

<feature>karaf-framework</feature>

<!-- JTA -->
<bundle start-level="30">mvn:org.apache.geronimo.specs/geronimo-jta_1.1_spec/1.1.1</bundle>

<!-- JPA -->
<bundle start-level="30">mvn:org.hibernate.javax.persistence/hibernate-jpa-2.1-api/1.0.0-SNAPSHOT</bundle>

<!-- Taken from Karaf-Tutorial -->
<bundle>mvn:commons-collections/commons-collections/3.2.1</bundle>
<bundle>mvn:commons-pool/commons-pool/1.5.4</bundle>
<bundle>mvn:commons-dbcp/commons-dbcp/1.4</bundle>
<bundle>mvn:commons-lang/commons-lang/2.6</bundle>
<bundle>wrap:mvn:net.sourceforge.serp/serp/1.13.1</bundle>

<bundle>mvn:com.h2database/h2/1.3.170</bundle>

<!-- These do not natively support OSGi, so using 3rd party bundles. -->
<bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.antlr/2.7.7_5</bundle>
<bundle>mvn:org.jboss.javassist/com.springsource.javassist/3.15.0.GA</bundle>
<bundle>mvn:org.apache.servicemix.specs/org.apache.servicemix.specs.jsr303-api-1.0.0/2.2.0</bundle>
<bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.ant/1.8.2_2</bundle>
<bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.dom4j/1.6.1_5</bundle>

<!-- These do not natively support OSGi, so wrap with BND. -->
<bundle>wrap:mvn:org.hibernate.common/hibernate-commons-annotations/4.0.2.Final</bundle>
<bundle>wrap:mvn:org.jboss/jandex/1.1.0.Alpha1</bundle>

<bundle>mvn:com.fasterxml/classmate/0.5.4</bundle>
<bundle>mvn:org.jboss.logging/jboss-logging/3.1.0.GA</bundle>

<bundle>mvn:org.hibernate/hibernate-core/4.3.0-SNAPSHOT</bundle>
<bundle>mvn:org.hibernate/hibernate-entitymanager/4.3.0-SNAPSHOT</bundle>
<bundle>mvn:org.hibernate/hibernate-osgi/4.3.0-SNAPSHOT</bundle>

<bundle>mvn:org.hibernate.osgi/unmanaged-jpa/1.0.0</bundle>
</feature>
</features>
@@ -0,0 +1,75 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.hibernate.osgi</groupId>
<artifactId>unmanaged-jpa</artifactId>
<version>1.0.0</version>
<packaging>bundle</packaging>

<dependencies>
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.osgi</groupId>
<artifactId>org.osgi.core</artifactId>
<version>4.3.1</version>
</dependency>
<dependency>
<groupId>org.osgi</groupId>
<artifactId>org.osgi.enterprise</artifactId>
<version>4.2.0</version>
</dependency>
<dependency>
<groupId>org.apache.karaf.shell</groupId>
<artifactId>org.apache.karaf.shell.console</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>4.3.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.3.170</version>
</dependency>

</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<extensions>true</extensions>
<configuration>
<instructions>
<Bundle-SymbolicName>org.hibernate.osgi.unmanaged-jpa</Bundle-SymbolicName>
<Bundle-Name>unmanaged-jpa</Bundle-Name>
<Bundle-Version>1.0.0</Bundle-Version>
<Export-Package>
org.hibernate.osgitest,
org.hibernate.osgitest.entity
</Export-Package>
<Import-Package>
org.apache.felix.service.command,
org.apache.felix.gogo.commands,
org.apache.karaf.shell.console,
org.apache.karaf.shell.commands,
org.h2,
javax.persistence;version="[1.0.0,2.1.0]",
<!-- Needed for proxying's Javassist enhancement during runtime -->
org.hibernate.proxy,
javassist.util.proxy,
*
</Import-Package>
</instructions>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -0,0 +1,41 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.hibernate.osgitest;

import java.util.List;

import org.hibernate.osgitest.entity.DataPoint;

/**
* @author Brett Meyer
*/
public interface DataPointService {

public void add(DataPoint dp);

public void update(DataPoint dp);

public DataPoint get(long id);

public List<DataPoint> getAll();

public void deleteAll();
}
@@ -0,0 +1,76 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.hibernate.osgitest;

import java.util.List;

import javax.persistence.EntityManager;

import org.hibernate.osgitest.entity.DataPoint;

/**
* @author Brett Meyer
*/
public class DataPointServiceImpl implements DataPointService {

public void add(DataPoint dp) {
EntityManager em = HibernateUtil.getEntityManager();
em.getTransaction().begin();
em.persist( dp );
em.getTransaction().commit();
em.close();
}

public void update(DataPoint dp) {
EntityManager em = HibernateUtil.getEntityManager();
em.getTransaction().begin();
em.merge( dp );
em.getTransaction().commit();
em.close();
}

public DataPoint get(long id) {
EntityManager em = HibernateUtil.getEntityManager();
em.getTransaction().begin();
DataPoint dp = (DataPoint) em.createQuery( "from DataPoint dp where dp.id=" + id ).getSingleResult();
em.getTransaction().commit();
em.close();
return dp;
}

public List<DataPoint> getAll() {
EntityManager em = HibernateUtil.getEntityManager();
em.getTransaction().begin();
List list = em.createQuery( "from DataPoint" ).getResultList();
em.getTransaction().commit();
em.close();
return list;
}

public void deleteAll() {
EntityManager em = HibernateUtil.getEntityManager();
em.getTransaction().begin();
em.createQuery( "delete from DataPoint" ).executeUpdate();
em.getTransaction().commit();
em.close();
}

}
@@ -0,0 +1,57 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.hibernate.osgitest;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.spi.PersistenceProvider;

import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.ServiceReference;

/**
* @author Brett Meyer
*/

public class HibernateUtil {

private static EntityManagerFactory emf;

public static EntityManager getEntityManager() {
return getEntityManagerFactory().createEntityManager();
}

private static EntityManagerFactory getEntityManagerFactory() {
if ( emf == null ) {
Bundle thisBundle = FrameworkUtil.getBundle( HibernateUtil.class );
// Could get this by wiring up OsgiTestBundleActivator as well.
BundleContext context = thisBundle.getBundleContext();

ServiceReference serviceReference = context.getServiceReference( PersistenceProvider.class.getName() );
PersistenceProvider persistenceProvider = (PersistenceProvider) context.getService( serviceReference );

emf = persistenceProvider.createEntityManagerFactory( "unmanaged-jpa", null );
}
return emf;
}
}
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "addJPA")
public class AddCommand implements Action {
@Argument(index=0, name="Name", required=true, description="Name", multiValued=false)
String name;

private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
DataPoint dp = new DataPoint();
dp.setName( name );
dpService.add( dp );
return null;
}

}
@@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;

@Command(scope = "dp", name = "deleteAllJPA")
public class DeleteAllCommand implements Action {
private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
dpService.deleteAll();
return null;
}

}
@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import java.util.List;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "getAllJPA")
public class GetAllCommand implements Action {
private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
List<DataPoint> dps = dpService.getAll();
for (DataPoint dp : dps) {
System.out.println(dp.getId() + ", " + dp.getName());
}
return null;
}

}
@@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "updateJPA")
public class UpdateCommand implements Action {
@Argument(index=0, name="Id", required=true, description="Id", multiValued=false)
String id;

@Argument(index=1, name="Name", required=true, description="Name", multiValued=false)
String name;

private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
DataPoint dp = dpService.get( Long.valueOf( id ) );
dp.setName( name );
dpService.update( dp );
return null;
}

}
@@ -0,0 +1,53 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.hibernate.osgitest.entity;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

/**
* @author Brett Meyer
*/
@Entity
public class DataPoint {
@Id
@GeneratedValue
private long id;

private String name;

public long getId() {
return id;
}

public void setId(long id) {
this.id = id;
}

public String getName() {
return name;
}

public void setName(String name) {
this.name = name;
}
}
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<persistence xmlns="http://java.sun.com/xml/ns/persistence"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
version="1.0">
<persistence-unit name="unmanaged-jpa">
<class>org.hibernate.osgitest.entity.DataPoint</class>
<exclude-unlisted-classes>true</exclude-unlisted-classes>

<properties>
<property name="hibernate.connection.driver_class" value="org.h2.Driver"/>
<property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect"/>
<property name="hibernate.connection.url" value="jdbc:h2:mem:db1;DB_CLOSE_DELAY=-1;MVCC=TRUE"/>
<property name="hibernate.connection.username" value="sa"/>
<property name="hibernate.connection.password" value=""/>
<property name="hibernate.hbm2ddl.auto" value="create-drop"/>
</properties>
</persistence-unit>
</persistence>
@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--

Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

-->
<blueprint default-activation="eager"
xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">

<bean id="dpService" class="org.hibernate.osgitest.DataPointServiceImpl"/>
<service ref="dpService" interface="org.hibernate.osgitest.DataPointService" />

<!-- This demonstrates how to register your custom implementations of Hibernate extension points, such as
Integrator and TypeContributor. -->
<!-- <bean id="integrator" class="your.package.IntegratorImpl"/>
<service ref="integrator" interface="org.hibernate.integrator.spi.Integrator"/>
<bean id="typeContributor" class="your.package.TypeContributorImpl"/>
<service ref="typeContributor" interface="org.hibernate.metamodel.spi.TypeContributor"/> -->

<!-- This bundle makes use of Karaf commands to demonstrate core persistence operations. Feel free to remove it. -->
<command-bundle xmlns="http://karaf.apache.org/xmlns/shell/v1.1.0">
<command name="dp:add">
<action class="org.hibernate.osgitest.command.AddCommand">
<property name="dpService" ref="dpService"/>
</action>
</command>
<command name="dp:update">
<action class="org.hibernate.osgitest.command.UpdateCommand">
<property name="dpService" ref="dpService"/>
</action>
</command>
<command name="dp:getAll">
<action class="org.hibernate.osgitest.command.GetAllCommand">
<property name="dpService" ref="dpService"/>
</action>
</command>
<command name="dp:deleteAll">
<action class="org.hibernate.osgitest.command.DeleteAllCommand">
<property name="dpService" ref="dpService"/>
</action>
</command>
</command-bundle>
</blueprint>
1 documentation/src/main/docbook/quickstart/tutorials/osgi/unmanaged-native/.gitignore vendored Normal file
@@ -0,0 +1 @@
/target
@@ -0,0 +1,72 @@
<?xml version="1.0" encoding="UTF-8"?>
<features>
<feature name="hibernate-test">

<feature>karaf-framework</feature>

<!-- JTA -->
<bundle start-level="30">mvn:org.apache.geronimo.specs/geronimo-jta_1.1_spec/1.1.1</bundle>

<!-- JPA -->
<bundle start-level="30">mvn:org.hibernate.javax.persistence/hibernate-jpa-2.1-api/1.0.0-SNAPSHOT</bundle>

<!-- Taken from Karaf-Tutorial -->
<bundle>mvn:commons-collections/commons-collections/3.2.1</bundle>
<bundle>mvn:commons-pool/commons-pool/1.5.4</bundle>
<bundle>mvn:commons-dbcp/commons-dbcp/1.4</bundle>
<bundle>mvn:commons-lang/commons-lang/2.6</bundle>
<bundle>wrap:mvn:net.sourceforge.serp/serp/1.13.1</bundle>

<bundle>mvn:com.h2database/h2/1.3.170</bundle>

<!-- These do not natively support OSGi, so using 3rd party bundles. -->
<bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.antlr/2.7.7_5</bundle>
<bundle>mvn:org.jboss.javassist/com.springsource.javassist/3.15.0.GA</bundle>
<bundle>mvn:org.apache.servicemix.specs/org.apache.servicemix.specs.jsr303-api-1.0.0/2.2.0</bundle>
<bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.ant/1.8.2_2</bundle>
<bundle>mvn:org.apache.servicemix.bundles/org.apache.servicemix.bundles.dom4j/1.6.1_5</bundle>

<!-- These do not natively support OSGi, so wrap with BND. -->
<bundle>wrap:mvn:org.hibernate.common/hibernate-commons-annotations/4.0.2.Final</bundle>
<bundle>wrap:mvn:org.jboss/jandex/1.1.0.Alpha1</bundle>
<!-- Optional. Needed to test C3P0 connection pools. -->
<!-- <bundle>wrap:mvn:c3p0/c3p0/0.9.1</bundle> -->
<!-- Optional. Needed to test Proxool connection pools. -->
<!-- <bundle>wrap:mvn:proxool/proxool/0.8.3</bundle> -->
<!-- Optional. Needed to test ehcache 2lc. -->
<!-- <bundle>wrap:mvn:net.sf.ehcache/ehcache-core/2.4.3</bundle> -->

<bundle>mvn:com.fasterxml/classmate/0.5.4</bundle>
<bundle>mvn:org.jboss.logging/jboss-logging/3.1.0.GA</bundle>

<!-- JACC is optional. -->
<!--<bundle>mvn:javax.servlet/javax.servlet-api/3.0.1</bundle>
<bundle>mvn:org.jboss.spec.javax.security.jacc/jboss-jacc-api_1.4_spec/1.0.2.Final</bundle>-->

<!-- hibernate-validator is optional. -->
<!--<bundle>wrap:mvn:javax.validation/validation-api/1.0.0.GA</bundle>
<bundle>mvn:org.hibernate/hibernate-validator/4.2.0.Final</bundle>-->

<!-- Optional. Needed to test infinispan 2lc. -->
<!-- IMPORTANT: Infinispan requires the JRE sun.misc package. You
MUST enable this in your OSGi container. For Karaf, add
"org.osgi.framework.system.packages.extra=sun.misc" to etc/config.properties -->
<!-- <bundle>wrap:mvn:org.jboss.marshalling/jboss-marshalling/1.3.17.GA</bundle>
<bundle>wrap:mvn:org.jboss.marshalling/jboss-marshalling-river/1.3.17.GA</bundle>
<bundle>wrap:mvn:org.jboss/staxmapper/1.1.0.Final</bundle>
<bundle>mvn:org.jgroups/jgroups/3.2.8.Final</bundle>
<bundle>mvn:org.infinispan/infinispan-core/5.2.0.Beta3</bundle> -->

<bundle>mvn:org.hibernate/hibernate-core/4.3.0-SNAPSHOT</bundle>
<!-- TODO: Shouldn't need this, but hibernate-osgi's activator is a catch-all for SF and EMF. -->
<bundle>mvn:org.hibernate/hibernate-entitymanager/4.3.0-SNAPSHOT</bundle>
<bundle>mvn:org.hibernate/hibernate-envers/4.3.0-SNAPSHOT</bundle>
<!-- <bundle>mvn:org.hibernate/hibernate-c3p0/4.3.0-SNAPSHOT</bundle> -->
<!-- <bundle>mvn:org.hibernate/hibernate-proxool/4.3.0-SNAPSHOT</bundle> -->
<!-- <bundle>mvn:org.hibernate/hibernate-ehcache/4.3.0-SNAPSHOT</bundle> -->
<!-- <bundle>mvn:org.hibernate/hibernate-infinispan/4.3.0-SNAPSHOT</bundle> -->
<bundle>mvn:org.hibernate/hibernate-osgi/4.3.0-SNAPSHOT</bundle>

<bundle>mvn:org.hibernate.osgi/unmanaged-native/1.0.0</bundle>
</feature>
</features>
@@ -0,0 +1,83 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.hibernate.osgi</groupId>
<artifactId>unmanaged-native</artifactId>
<version>1.0.0</version>
<packaging>bundle</packaging>

<dependencies>
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.osgi</groupId>
<artifactId>org.osgi.core</artifactId>
<version>4.3.1</version>
</dependency>
<dependency>
<groupId>org.osgi</groupId>
<artifactId>org.osgi.enterprise</artifactId>
<version>4.2.0</version>
</dependency>
<dependency>
<groupId>org.apache.karaf.shell</groupId>
<artifactId>org.apache.karaf.shell.console</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>4.3.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-envers</artifactId>
<version>4.3.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.3.170</version>
</dependency>

</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<extensions>true</extensions>
<configuration>
<instructions>
<Bundle-SymbolicName>org.hibernate.osgi.unmanaged-native</Bundle-SymbolicName>
<Bundle-Name>unmanaged-native</Bundle-Name>
<Bundle-Version>1.0.0</Bundle-Version>
<Export-Package>
org.hibernate.osgitest,
org.hibernate.osgitest.entity
</Export-Package>
<Import-Package>
org.apache.felix.service.command,
org.apache.felix.gogo.commands,
org.apache.karaf.shell.console,
org.apache.karaf.shell.commands,
org.h2,
org.hibernate,
org.hibernate.cfg,
org.hibernate.service,
javax.persistence;version="[1.0.0,2.1.0]",
<!-- Needed for proxying's Javassist enhancement during runtime -->
org.hibernate.proxy,
javassist.util.proxy,
*
</Import-Package>
</instructions>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -0,0 +1,47 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.hibernate.osgitest;

import java.util.List;
import java.util.Map;

import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.osgitest.entity.DataPoint;

/**
* @author Brett Meyer
*/
public interface DataPointService {

public void add(DataPoint dp);

public void update(DataPoint dp);

public DataPoint get(long id);

public DataPoint load(long id);

public List<DataPoint> getAll();

public Map<Number, DefaultRevisionEntity> getRevisions(long id);

public void deleteAll();
}
@@ -0,0 +1,102 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.hibernate.osgitest;

import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.hibernate.Hibernate;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.hibernate.envers.AuditReader;
import org.hibernate.envers.AuditReaderFactory;
import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.osgitest.entity.DataPoint;

/**
* @author Brett Meyer
*/
public class DataPointServiceImpl implements DataPointService {

public void add(DataPoint dp) {
Session s = HibernateUtil.getSession();
s.getTransaction().begin();
s.persist( dp );
s.getTransaction().commit();
s.close();
}

public void update(DataPoint dp) {
Session s = HibernateUtil.getSession();
s.getTransaction().begin();
s.update( dp );
s.getTransaction().commit();
s.close();
}

public DataPoint get(long id) {
Session s = HibernateUtil.getSession();
s.getTransaction().begin();
DataPoint dp = (DataPoint) s.createCriteria( DataPoint.class ).add(
Restrictions.eq( "id", id ) ).uniqueResult();
s.getTransaction().commit();
s.close();
return dp;
}

// Test lazy loading (mainly to make sure the proxy classes work in OSGi)
public DataPoint load(long id) {
Session s = HibernateUtil.getSession();
s.getTransaction().begin();
DataPoint dp = (DataPoint) s.load( DataPoint.class, new Long(id) );
// initialize
dp.getName();
s.getTransaction().commit();
s.close();
return dp;
}

public List<DataPoint> getAll() {
Session s = HibernateUtil.getSession();
s.getTransaction().begin();
List list = s.createQuery( "from DataPoint" ).list();
s.getTransaction().commit();
s.close();
return list;
}

public Map<Number, DefaultRevisionEntity> getRevisions(long id) {
Session s = HibernateUtil.getSession();
AuditReader reader = AuditReaderFactory.get(s);
List<Number> revisionNums = reader.getRevisions( DataPoint.class, id );
return reader.findRevisions( DefaultRevisionEntity.class, new HashSet<Number>(revisionNums) );
}

public void deleteAll() {
Session s = HibernateUtil.getSession();
s.getTransaction().begin();
s.createQuery( "delete from DataPoint" ).executeUpdate();
s.getTransaction().commit();
s.close();
}

}
@@ -0,0 +1,53 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.hibernate.osgitest;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.ServiceReference;

/**
* @author Brett Meyer
*/

public class HibernateUtil {

private static SessionFactory sf;

public static Session getSession() {
return getSessionFactory().openSession();
}

private static SessionFactory getSessionFactory() {
if ( sf == null ) {
Bundle thisBundle = FrameworkUtil.getBundle( HibernateUtil.class );
// Could get this by wiring up OsgiTestBundleActivator as well.
BundleContext context = thisBundle.getBundleContext();

ServiceReference sr = context.getServiceReference( SessionFactory.class.getName() );
sf = (SessionFactory) context.getService( sr );
}
return sf;
}
}
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "add")
public class AddCommand implements Action {
@Argument(index=0, name="Name", required=true, description="Name", multiValued=false)
String name;

private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
DataPoint dp = new DataPoint();
dp.setName( name );
dpService.add( dp );
return null;
}

}
@@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;

@Command(scope = "dp", name = "deleteAll")
public class DeleteAllCommand implements Action {
private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
dpService.deleteAll();
return null;
}

}
@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import java.util.List;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "getAll")
public class GetAllCommand implements Action {
private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
List<DataPoint> dps = dpService.getAll();
for (DataPoint dp : dps) {
System.out.println(dp.getId() + ", " + dp.getName());
}
return null;
}

}
@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "get")
public class GetCommand implements Action {
@Argument(index = 0, name = "Id", required = true, description = "Id", multiValued = false)
String id;

private DataPointService dpService;

public void setDpService(DataPointService dpService) {
this.dpService = dpService;
}

public Object execute(CommandSession session) throws Exception {
DataPoint dp = dpService.get( Long.valueOf( id ) );
System.out.println( dp.getId() + ", " + dp.getName() );
return null;
}

}
@@ -0,0 +1,48 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hibernate.osgitest.command;

import java.util.Map;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.osgitest.DataPointService;

@Command(scope = "dp", name = "getRevisions")
public class GetRevisionsCommand implements Action {
    @Argument(index=0, name="Id", required=true, description="Id", multiValued=false)
    String id;

    private DataPointService dpService;

    public void setDpService(DataPointService dpService) {
        this.dpService = dpService;
    }

    public Object execute(CommandSession session) throws Exception {
        Map<Number, DefaultRevisionEntity> revisions = dpService.getRevisions(Long.valueOf( id ));
        for (Number revisionNum : revisions.keySet()) {
            DefaultRevisionEntity dre = revisions.get( revisionNum );
            System.out.println(revisionNum + ": " + dre.getId() + ", " + dre.getTimestamp());
        }
        return null;
    }
}
@@ -0,0 +1,43 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "load")
public class LoadCommand implements Action {
    @Argument(index = 0, name = "Id", required = true, description = "Id", multiValued = false)
    String id;

    private DataPointService dpService;

    public void setDpService(DataPointService dpService) {
        this.dpService = dpService;
    }

    public Object execute(CommandSession session) throws Exception {
        DataPoint dp = dpService.load( Long.valueOf( id ) );
        System.out.println( dp.getId() + ", " + dp.getName() );
        return null;
    }
}
@@ -0,0 +1,47 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hibernate.osgitest.command;

import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.service.command.CommandSession;
import org.hibernate.osgitest.DataPointService;
import org.hibernate.osgitest.entity.DataPoint;

@Command(scope = "dp", name = "update")
public class UpdateCommand implements Action {
    @Argument(index=0, name="Id", required=true, description="Id", multiValued=false)
    String id;

    @Argument(index=1, name="Name", required=true, description="Name", multiValued=false)
    String name;

    private DataPointService dpService;

    public void setDpService(DataPointService dpService) {
        this.dpService = dpService;
    }

    public Object execute(CommandSession session) throws Exception {
        DataPoint dp = dpService.get( Long.valueOf( id ) );
        dp.setName( name );
        dpService.update( dp );
        return null;
    }
}
@@ -0,0 +1,58 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * JBoss, Home of Professional Open Source
 * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @authors tag. All rights reserved.
 * See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * This copyrighted material is made available to anyone wishing to use,
 * modify, copy, or redistribute it subject to the terms and conditions
 * of the GNU Lesser General Public License, v. 2.1.
 * This program is distributed in the hope that it will be useful, but WITHOUT A
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
 * You should have received a copy of the GNU Lesser General Public License,
 * v.2.1 along with this distribution; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */
package org.hibernate.osgitest.entity;

import java.io.Serializable;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

import org.hibernate.envers.Audited;

/**
 * @author Brett Meyer
 */
@Entity
@Audited
public class DataPoint implements Serializable {
    @Id
    @GeneratedValue
    private long id;

    private String name;

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
}
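Every dp:* command in this test bundle delegates to a DataPointService whose interface and implementation are not part of this excerpt. A minimal sketch of what that interface presumably looks like, inferred only from the calls the commands above make (the add method backing the dp:add command referenced later in blueprint.xml is an assumption, as is any detail beyond those calls):

package org.hibernate.osgitest;

import java.util.List;
import java.util.Map;

import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.osgitest.entity.DataPoint;

// Hypothetical sketch of the service the dp:* Karaf commands call into.
public interface DataPointService {
    void add(DataPoint dp);                                    // assumed; backs dp:add (AddCommand not shown in this excerpt)
    void update(DataPoint dp);                                 // backs dp:update
    DataPoint get(long id);                                    // backs dp:get
    DataPoint load(long id);                                   // backs dp:load
    List<DataPoint> getAll();                                  // backs dp:getAll
    Map<Number, DefaultRevisionEntity> getRevisions(long id);  // backs dp:getRevisions (Envers audit data)
    void deleteAll();                                          // backs dp:deleteAll
}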
@@ -0,0 +1,72 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--

    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements. See the NOTICE file distributed with
    this work for additional information regarding copyright ownership.
    The ASF licenses this file to You under the Apache License, Version 2.0
    (the "License"); you may not use this file except in compliance with
    the License. You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.

-->
<blueprint default-activation="eager"
           xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0"
           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">

    <bean id="dpService" class="org.hibernate.osgitest.DataPointServiceImpl"/>
    <service ref="dpService" interface="org.hibernate.osgitest.DataPointService" />

    <!-- This demonstrates how to register your custom implementations of Hibernate extension points, such as
         Integrator and TypeContributor. -->
    <!-- <bean id="integrator" class="your.package.IntegratorImpl"/>
    <service ref="integrator" interface="org.hibernate.integrator.spi.Integrator"/>
    <bean id="typeContributor" class="your.package.TypeContributorImpl"/>
    <service ref="typeContributor" interface="org.hibernate.metamodel.spi.TypeContributor"/> -->

    <!-- This bundle makes use of Karaf commands to demonstrate core persistence operations. Feel free to remove it. -->
    <command-bundle xmlns="http://karaf.apache.org/xmlns/shell/v1.1.0">
        <command name="dp:add">
            <action class="org.hibernate.osgitest.command.AddCommand">
                <property name="dpService" ref="dpService"/>
            </action>
        </command>
        <command name="dp:update">
            <action class="org.hibernate.osgitest.command.UpdateCommand">
                <property name="dpService" ref="dpService"/>
            </action>
        </command>
        <command name="dp:get">
            <action class="org.hibernate.osgitest.command.GetCommand">
                <property name="dpService" ref="dpService"/>
            </action>
        </command>
        <command name="dp:load">
            <action class="org.hibernate.osgitest.command.LoadCommand">
                <property name="dpService" ref="dpService"/>
            </action>
        </command>
        <command name="dp:getAll">
            <action class="org.hibernate.osgitest.command.GetAllCommand">
                <property name="dpService" ref="dpService"/>
            </action>
        </command>
        <command name="dp:getRevisions">
            <action class="org.hibernate.osgitest.command.GetRevisionsCommand">
                <property name="dpService" ref="dpService"/>
            </action>
        </command>
        <command name="dp:deleteAll">
            <action class="org.hibernate.osgitest.command.DeleteAllCommand">
                <property name="dpService" ref="dpService"/>
            </action>
        </command>
    </command-bundle>
</blueprint>
@@ -0,0 +1,111 @@
<!--
  ~ Hibernate, Relational Persistence for Idiomatic Java
  ~
  ~ Copyright (c) 2007, Red Hat Middleware LLC or third-party contributors as
  ~ indicated by the @author tags or express copyright attribution
  ~ statements applied by the authors. All third-party contributions are
  ~ distributed under license by Red Hat Middleware LLC.
  ~
  ~ This copyrighted material is made available to anyone wishing to use, modify,
  ~ copy, or redistribute it subject to the terms and conditions of the GNU
  ~ Lesser General Public License, as published by the Free Software Foundation.
  ~
  ~ This program is distributed in the hope that it will be useful,
  ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
  ~ for more details.
  ~
  ~ You should have received a copy of the GNU Lesser General Public License
  ~ along with this distribution; if not, write to:
  ~ Free Software Foundation, Inc.
  ~ 51 Franklin Street, Fifth Floor
  ~ Boston, MA  02110-1301  USA
  -->
<ehcache>

    <!-- Sets the path to the directory where cache .data files are created.

         If the path is a Java System Property it is replaced by
         its value in the running VM.

         The following properties are translated:
         user.home - User's home directory
         user.dir - User's current working directory
         java.io.tmpdir - Default temp file path -->
    <diskStore path="./target/tmp"/>


    <!--Default Cache configuration. These will applied to caches programmatically created through
        the CacheManager.

        The following attributes are required for defaultCache:

        maxInMemory       - Sets the maximum number of objects that will be created in memory
        eternal           - Sets whether elements are eternal. If eternal, timeouts are ignored and the element
                            is never expired.
        timeToIdleSeconds - Sets the time to idle for an element before it expires. Is only used
                            if the element is not eternal. Idle time is now - last accessed time
        timeToLiveSeconds - Sets the time to live for an element before it expires. Is only used
                            if the element is not eternal. TTL is now - creation time
        overflowToDisk    - Sets whether elements can overflow to disk when the in-memory cache
                            has reached the maxInMemory limit.

        -->
    <defaultCache
        maxElementsInMemory="10000"
        eternal="false"
        timeToIdleSeconds="120"
        timeToLiveSeconds="120"
        overflowToDisk="true"
        />

    <!--Predefined caches. Add your cache configuration settings here.
        If you do not have a configuration for your cache a WARNING will be issued when the
        CacheManager starts

        The following attributes are required for defaultCache:

        name              - Sets the name of the cache. This is used to identify the cache. It must be unique.
        maxInMemory       - Sets the maximum number of objects that will be created in memory
        eternal           - Sets whether elements are eternal. If eternal, timeouts are ignored and the element
                            is never expired.
        timeToIdleSeconds - Sets the time to idle for an element before it expires. Is only used
                            if the element is not eternal. Idle time is now - last accessed time
        timeToLiveSeconds - Sets the time to live for an element before it expires. Is only used
                            if the element is not eternal. TTL is now - creation time
        overflowToDisk    - Sets whether elements can overflow to disk when the in-memory cache
                            has reached the maxInMemory limit.

        -->

    <!-- Sample cache named sampleCache1
        This cache contains a maximum in memory of 10000 elements, and will expire
        an element if it is idle for more than 5 minutes and lives for more than
        10 minutes.

        If there are more than 10000 elements it will overflow to the
        disk cache, which in this configuration will go to wherever java.io.tmp is
        defined on your system. On a standard Linux system this will be /tmp"
        -->
    <cache name="sampleCache1"
        maxElementsInMemory="10000"
        eternal="false"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        overflowToDisk="true"
        />

    <!-- Sample cache named sampleCache2
        This cache contains 1000 elements. Elements will always be held in memory.
        They are not expired. -->
    <cache name="sampleCache2"
        maxElementsInMemory="1000"
        eternal="true"
        timeToIdleSeconds="0"
        timeToLiveSeconds="0"
        overflowToDisk="false"
        /> -->

    <!-- Place configuration for your caches following -->

</ehcache>
@@ -0,0 +1,46 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
        "-//Hibernate/Hibernate Configuration DTD//EN"
        "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">

<hibernate-configuration>
    <session-factory>
        <property name="hibernate.connection.driver_class">org.h2.Driver</property>
        <property name="hibernate.connection.url">jdbc:h2:mem:db1;DB_CLOSE_DELAY=-1;MVCC=TRUE</property>
        <property name="hibernate.connection.username">sa</property>
        <property name="hibernate.connection.password"></property>
        <property name="hibernate.dialect">org.hibernate.dialect.H2Dialect</property>
        <property name="hibernate.hbm2ddl.auto">create-drop</property>

        <!-- <property name="hibernate.connection.pool_size">5</property>
        <property name="hibernate.c3p0.min_size">50</property>
        <property name="hibernate.c3p0.max_size">800</property>
        <property name="hibernate.c3p0.max_statements">50</property>
        <property name="hibernate.jdbc.batch_size">10</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.idle_test_period">3000</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property> -->

        <!-- <property name="hibernate.connection.pool_size">5</property>
        <property name="hibernate.jdbc.batch_size">10</property>
        <property name="hibernate.connection.provider_class">proxool</property>
        <property name="hibernate.proxool.properties">pool-one.properties</property>
        <property name="hibernate.proxool.pool_alias">pool-one</property> -->

        <!-- <property name="hibernate.cache.region_prefix">hibernate.test</property>
        <property name="cache.use_query_cache">true</property>
        <property name="cache.use_second_level_cache">true</property>
        <property name="cache.use_structured_entries">true</property>
        <property name="cache.region.factory_class">org.hibernate.cache.EhCacheRegionFactory</property>
        <property name="net.sf.ehcache.configurationResourceName">file:///[PATH]/unmanaged-jpa/src/main/resources/ehcache.xml</property> -->

        <!-- <property name="hibernate.cache.region_prefix">hibernate.test</property>
        <property name="cache.use_query_cache">true</property>
        <property name="cache.use_second_level_cache">true</property>
        <property name="cache.use_structured_entries">true</property>
        <property name="cache.region.factory_class">org.hibernate.cache.infinispan.InfinispanRegionFactory</property> -->

        <mapping class="org.hibernate.osgitest.entity.DataPoint"/>
    </session-factory>

</hibernate-configuration>
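This hibernate.cfg.xml is a classic native-bootstrap configuration. For orientation only, a service implementation could build its SessionFactory from it roughly as below; this is a sketch assuming plain (non-OSGi) Hibernate 4.3-style bootstrap, whereas inside the bundle the SessionFactory would normally be obtained through the OSGi service registry as the quickstart intends:

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.hibernate.osgitest.entity.DataPoint;

public class DataPointBootstrapSketch {
    public static void main(String[] args) {
        // Read hibernate.cfg.xml from the classpath (driver, URL, dialect, hbm2ddl, DataPoint mapping).
        Configuration cfg = new Configuration().configure();
        StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
                .applySettings( cfg.getProperties() )
                .build();
        SessionFactory sessionFactory = cfg.buildSessionFactory( registry );

        // Persist one DataPoint, mirroring what the dp:add command would do.
        Session s = sessionFactory.openSession();
        s.getTransaction().begin();
        DataPoint dp = new DataPoint();
        dp.setName( "test" );
        s.persist( dp );
        s.getTransaction().commit();
        s.close();
        sessionFactory.close();
    }
}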
@@ -0,0 +1,7 @@
jdbc-0.proxool.alias=pool-one
jdbc-0.proxool.driver-url=jdbc:h2:mem:db1;DB_CLOSE_DELAY=-1;MVCC=TRUE
jdbc-0.proxool.driver-class=org.h2.Driver
jdbc-0.user=sa
jdbc-0.password=
jdbc-0.proxool.maximum-connection-count=2
jdbc-0.proxool.house-keeping-test-sql=select CURRENT_DATE
@@ -44,6 +44,7 @@
        <module>annotations</module>
        <module>entitymanager</module>
        <module>envers</module>
+       <module>osgi</module>
    </modules>

    <dependencies>
@@ -64,7 +64,14 @@ jar {
            // Temporarily support JTA 1.1 -- Karaf and other frameworks still
            // use it. Without this, the plugin generates [1.2,2).
            // build.gradle adds javax.transaction for all modules
-           'javax.transaction.xa;version="[1.1,2)"'
+           'javax.transaction.xa;version="[1.1,2)"',
+           // optionals
+           'javax.management;resolution:=optional',
+           'javax.naming.event;resolution:=optional',
+           'javax.naming.spi;resolution:=optional',
+           'org.apache.tools.ant;resolution:=optional',
+           'org.apache.tools.ant.taskdefs;resolution:=optional',
+           'org.apache.tools.ant.types;resolution:=optional'

            // TODO: Uncomment once EntityManagerFactoryBuilderImpl no longer
            // uses ClassLoaderServiceImpl.
@@ -0,0 +1,74 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors. All third-party contributions are
 * distributed under license by Red Hat Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA  02110-1301  USA
 */
package org.hibernate.action.internal;

import java.io.Serializable;

import org.hibernate.HibernateException;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.persister.collection.CollectionPersister;

/**
 * If a collection is extra lazy and has queued ops, we still need to
 * process them. Ex: OneToManyPersister needs to insert indexes for List
 * collections. See HHH-8083.
 *
 * @author Brett Meyer
 */
public final class QueuedOperationCollectionAction extends CollectionAction {

    /**
     * Constructs a CollectionUpdateAction
     *
     * @param collection The collection to update
     * @param persister The collection persister
     * @param id The collection key
     * @param session The session
     */
    public QueuedOperationCollectionAction(
            final PersistentCollection collection,
            final CollectionPersister persister,
            final Serializable id,
            final SessionImplementor session) {
        super( persister, collection, id, session );
    }

    @Override
    public void execute() throws HibernateException {
        final Serializable id = getKey();
        final SessionImplementor session = getSession();
        final CollectionPersister persister = getPersister();
        final PersistentCollection collection = getCollection();

        persister.processQueuedOps( collection, id, session );
    }
}
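The class above addresses the HHH-8083 case its Javadoc mentions: when an extra-lazy indexed collection queues operations instead of loading, the queued index inserts still have to be processed at flush time. The rough shape of a mapping that exercises that path is sketched below; the entity names and the exact column name are invented for illustration:

import java.util.ArrayList;
import java.util.List;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OrderColumn;

import org.hibernate.annotations.LazyCollection;
import org.hibernate.annotations.LazyCollectionOption;

@Entity
public class Parent {
    @Id
    @GeneratedValue
    private long id;

    // Extra-lazy list with an order column: adding an element does not force the
    // collection to load, so the add is queued; QueuedOperationCollectionAction
    // then makes sure the queued operations (including index writes) run at flush.
    @OneToMany(mappedBy = "parent")
    @OrderColumn(name = "position")
    @LazyCollection(LazyCollectionOption.EXTRA)
    private List<Child> children = new ArrayList<Child>();

    public List<Child> getChildren() {
        return children;
    }
}

@Entity
class Child {
    @Id
    @GeneratedValue
    private long id;

    @ManyToOne
    private Parent parent;

    public void setParent(Parent parent) {
        this.parent = parent;
    }
}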
@@ -272,7 +272,7 @@ public final class AnnotationBinder {
                    (List<NamedStoredProcedureQuery>) defaults.get( NamedStoredProcedureQuery.class );
            if ( annotations != null ) {
                for ( NamedStoredProcedureQuery annotation : annotations ) {
-                   QueryBinder.bindNamedStoredProcedureQuery( annotation, mappings );
+                   bindNamedStoredProcedureQuery( mappings, annotation, true );
                }
            }
        }
@@ -281,9 +281,7 @@ public final class AnnotationBinder {
                    (List<NamedStoredProcedureQueries>) defaults.get( NamedStoredProcedureQueries.class );
            if ( annotations != null ) {
                for ( NamedStoredProcedureQueries annotation : annotations ) {
-                   for ( NamedStoredProcedureQuery queryAnnotation : annotation.value() ) {
-                       QueryBinder.bindNamedStoredProcedureQuery( queryAnnotation, mappings );
-                   }
+                   bindNamedStoredProcedureQueries( mappings, annotation, true );
                }
            }
        }
@@ -394,24 +392,30 @@ public final class AnnotationBinder {
        }

        // NamedStoredProcedureQuery handling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       {
-           final NamedStoredProcedureQuery annotation = annotatedElement.getAnnotation( NamedStoredProcedureQuery.class );
-           if ( annotation != null ) {
-               QueryBinder.bindNamedStoredProcedureQuery( annotation, mappings );
-           }
-       }
+       bindNamedStoredProcedureQuery( mappings, annotatedElement.getAnnotation( NamedStoredProcedureQuery.class ), false );

        // NamedStoredProcedureQueries handling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       {
-           final NamedStoredProcedureQueries annotation = annotatedElement.getAnnotation( NamedStoredProcedureQueries.class );
-           if ( annotation != null ) {
-               for ( NamedStoredProcedureQuery queryAnnotation : annotation.value() ) {
-                   QueryBinder.bindNamedStoredProcedureQuery( queryAnnotation, mappings );
-               }
-           }
-       }
+       bindNamedStoredProcedureQueries(
+               mappings,
+               annotatedElement.getAnnotation( NamedStoredProcedureQueries.class ),
+               false
+       );
    }

+   private static void bindNamedStoredProcedureQueries(Mappings mappings, NamedStoredProcedureQueries annotation, boolean isDefault) {
+       if ( annotation != null ) {
+           for ( NamedStoredProcedureQuery queryAnnotation : annotation.value() ) {
+               bindNamedStoredProcedureQuery( mappings, queryAnnotation, isDefault );
+           }
+       }
+   }
+
+   private static void bindNamedStoredProcedureQuery(Mappings mappings, NamedStoredProcedureQuery annotation, boolean isDefault) {
+       if ( annotation != null ) {
+           QueryBinder.bindNamedStoredProcedureQuery( annotation, mappings, isDefault );
+       }
+   }
+
    private static IdGenerator buildIdGenerator(java.lang.annotation.Annotation ann, Mappings mappings) {
        IdGenerator idGen = new IdGenerator();
        if ( mappings.getSchemaName() != null ) {
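These binder changes route JPA 2.1 @NamedStoredProcedureQuery metadata, including defaults coming from XML mappings, through the new bindNamedStoredProcedureQuery/bindNamedStoredProcedureQueries helpers. For context, the kind of annotation being bound looks like the following sketch; the procedure, parameter, and entity names here are invented, while the hint name and annotation attributes are standard JPA 2.1:

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedStoredProcedureQuery;
import javax.persistence.ParameterMode;
import javax.persistence.QueryHint;
import javax.persistence.StoredProcedureParameter;

@Entity
@NamedStoredProcedureQuery(
        name = "findCitiesByName",                  // the registered name checked for duplicates in Configuration
        procedureName = "find_cities_by_name",      // hypothetical database procedure
        parameters = {
                @StoredProcedureParameter(name = "nameFilter", mode = ParameterMode.IN, type = String.class)
        },
        resultClasses = City.class,
        hints = { @QueryHint(name = "org.hibernate.comment", value = "example hint") }
)
public class City {
    @Id
    private long id;
    private String name;
}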
@@ -263,6 +263,8 @@ public class Configuration implements Serializable {
    private Set<String> defaultNamedQueryNames;
    private Set<String> defaultNamedNativeQueryNames;
    private Set<String> defaultSqlResultSetMappingNames;
+   private Set<String> defaultNamedProcedure;
+
    private Set<String> defaultNamedGenerators;
    private Map<String, Properties> generatorTables;
    private Map<Table, List<UniqueConstraintHolder>> uniqueConstraintHoldersByTable;
@@ -303,7 +305,7 @@ public class Configuration implements Serializable {
        namedSqlQueries = new HashMap<String,NamedSQLQueryDefinition>();
        sqlResultSetMappings = new HashMap<String, ResultSetMappingDefinition>();
        namedEntityGraphMap = new HashMap<String, NamedEntityGraphDefinition>();
+       namedProcedureCallMap = new HashMap<String, NamedProcedureCallDefinition>(  );
        typeDefs = new HashMap<String,TypeDef>();
        filterDefinitions = new HashMap<String, FilterDefinition>();
        fetchProfiles = new HashMap<String, FetchProfile>();
@@ -339,6 +341,7 @@ public class Configuration implements Serializable {
        defaultNamedQueryNames = new HashSet<String>();
        defaultNamedNativeQueryNames = new HashSet<String>();
        defaultSqlResultSetMappingNames = new HashSet<String>();
+       defaultNamedProcedure = new HashSet<String>(  );
        defaultNamedGenerators = new HashSet<String>();
        uniqueConstraintHoldersByTable = new HashMap<Table, List<UniqueConstraintHolder>>();
        jpaIndexHoldersByTable = new HashMap<Table,List<JPAIndexHolder>>(  );
@@ -2890,16 +2893,25 @@ public class Configuration implements Serializable {
        public void addNamedProcedureCallDefinition(NamedProcedureCallDefinition definition)
                throws DuplicateMappingException {
            final String name = definition.getRegisteredName();
-           final NamedProcedureCallDefinition previous = namedProcedureCallMap.put( name, definition );
-           if ( previous != null ) {
-               throw new DuplicateMappingException( "named stored procedure query", name );
+           if ( !defaultNamedProcedure.contains( name ) ) {
+               final NamedProcedureCallDefinition previous = namedProcedureCallMap.put( name, definition );
+               if ( previous != null ) {
+                   throw new DuplicateMappingException( "named stored procedure query", name );
+               }
            }
        }

+       @Override
+       public void addDefaultNamedProcedureCallDefinition(NamedProcedureCallDefinition definition)
+               throws DuplicateMappingException {
+           addNamedProcedureCallDefinition( definition );
+           defaultNamedProcedure.add( definition.getRegisteredName() );
+       }
+
        @Override
        public void addNamedEntityGraphDefintion(NamedEntityGraphDefinition definition)
                throws DuplicateMappingException {
            final String name = definition.getRegisteredName();

            final NamedEntityGraphDefinition previous = namedEntityGraphMap.put( name, definition );
            if ( previous != null ) {
                throw new DuplicateMappingException( "NamedEntityGraph", name );
@@ -348,6 +348,17 @@
     */
    public void addNamedProcedureCallDefinition(NamedProcedureCallDefinition definition) throws DuplicateMappingException;

+   /**
+    * Adds metadata for a named stored procedure call to this repository.
+    *
+    * @param definition The procedure call information
+    *
+    * @throws DuplicateMappingException If a query already exists with that name.
+    */
+   public void addDefaultNamedProcedureCallDefinition(NamedProcedureCallDefinition definition) throws DuplicateMappingException;
+
+
    /**
     * Adds metadata for a named entity graph to this repository
     *
@@ -25,6 +25,8 @@ package org.hibernate.cfg.annotations;

import javax.persistence.NamedEntityGraph;

+import org.hibernate.internal.util.StringHelper;
+
/**
 * Models the definition of a {@link NamedEntityGraph} annotation
 *
@@ -34,15 +36,17 @@ public class NamedEntityGraphDefinition {
    private final NamedEntityGraph annotation;
    private final String jpaEntityName;
    private final String entityName;
+   private final String name;

    public NamedEntityGraphDefinition(NamedEntityGraph annotation, String jpaEntityName, String entityName) {
        this.annotation = annotation;
        this.jpaEntityName = jpaEntityName;
        this.entityName = entityName;
+       this.name = StringHelper.isEmpty( annotation.name() ) ? jpaEntityName : annotation.name();
    }

    public String getRegisteredName() {
-       return jpaEntityName;
+       return name;
    }

    public String getJpaEntityName() {
@@ -25,25 +25,20 @@ package org.hibernate.cfg.annotations;

import javax.persistence.NamedStoredProcedureQuery;
import javax.persistence.ParameterMode;
-import javax.persistence.QueryHint;
import javax.persistence.StoredProcedureParameter;
import java.util.ArrayList;
import java.util.Collections;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

-import org.hibernate.LockMode;
import org.hibernate.MappingException;
import org.hibernate.engine.ResultSetMappingDefinition;
import org.hibernate.engine.query.spi.sql.NativeSQLQueryReturn;
-import org.hibernate.engine.query.spi.sql.NativeSQLQueryRootReturn;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.SessionFactoryImpl;
import org.hibernate.internal.util.StringHelper;
-import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.procedure.ProcedureCallMemento;
import org.hibernate.procedure.internal.ParameterStrategy;
import org.hibernate.procedure.internal.ProcedureCallMementoImpl;
@@ -65,7 +60,7 @@ public class NamedProcedureCallDefinition {
    private final Class[] resultClasses;
    private final String[] resultSetMappings;
    private final ParameterDefinitions parameterDefinitions;
-   private final Map<String,Object> hints;
+   private final Map<String, Object> hints;

    NamedProcedureCallDefinition(NamedStoredProcedureQuery annotation) {
        this.registeredName = annotation.name();
@@ -73,7 +68,7 @@ public class NamedProcedureCallDefinition {
        this.resultClasses = annotation.resultClasses();
        this.resultSetMappings = annotation.resultSetMappings();
        this.parameterDefinitions = new ParameterDefinitions( annotation.parameters() );
-       this.hints = extract( annotation.hints() );
+       this.hints = new QueryHintDefinition( annotation.hints() ).getHintsMap();

        final boolean specifiesResultClasses = resultClasses != null && resultClasses.length > 0;
        final boolean specifiesResultSetMappings = resultSetMappings != null && resultSetMappings.length > 0;
@@ -88,17 +83,6 @@ public class NamedProcedureCallDefinition {
        }
    }

-   private static Map<String, Object> extract(QueryHint[] hints) {
-       if ( hints == null || hints.length == 0 ) {
-           return Collections.emptyMap();
-       }
-       final Map<String,Object> hintsMap = new HashMap<String, Object>();
-       for ( QueryHint hint : hints ) {
-           hintsMap.put( hint.name(), hint.value() );
-       }
-       return hintsMap;
-   }
-
    public String getRegisteredName() {
        return registeredName;
    }
@@ -201,7 +185,7 @@ public class NamedProcedureCallDefinition {
        public List<ParameterMemento> toMementos(SessionFactoryImpl sessionFactory) {
            final List<ParameterMemento> mementos = new ArrayList<ParameterMemento>();
            for ( ParameterDefinition definition : parameterDefinitions ) {
-               definition.toMemento( sessionFactory );
+               mementos.add(definition.toMemento( sessionFactory ));
            }
            return mementos;
        }
@@ -24,13 +24,11 @@
package org.hibernate.cfg.annotations;

import java.util.HashMap;
-import javax.persistence.LockModeType;
import javax.persistence.NamedNativeQueries;
import javax.persistence.NamedNativeQuery;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.NamedStoredProcedureQuery;
-import javax.persistence.QueryHint;
import javax.persistence.SqlResultSetMapping;
import javax.persistence.SqlResultSetMappings;

@@ -41,9 +39,9 @@ import org.hibernate.AssertionFailure;
import org.hibernate.CacheMode;
import org.hibernate.FlushMode;
import org.hibernate.LockMode;
-import org.hibernate.LockOptions;
import org.hibernate.annotations.CacheModeType;
import org.hibernate.annotations.FlushModeType;
+import org.hibernate.annotations.QueryHints;
import org.hibernate.cfg.BinderHelper;
import org.hibernate.cfg.Mappings;
import org.hibernate.cfg.NotYetImplementedException;
@@ -54,7 +52,6 @@ import org.hibernate.engine.spi.NamedQueryDefinitionBuilder;
import org.hibernate.engine.spi.NamedSQLQueryDefinition;
import org.hibernate.engine.spi.NamedSQLQueryDefinitionBuilder;
import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.LockModeConverter;

/**
 * Query binder
@@ -70,19 +67,19 @@ public abstract class QueryBinder {
            throw new AnnotationException( "A named query must have a name when used in class or package level" );
        }
        //EJBQL Query
-       QueryHint[] hints = queryAnn.hints();
+       QueryHintDefinition hints = new QueryHintDefinition( queryAnn.hints() );
        String queryName = queryAnn.query();
        NamedQueryDefinition queryDefinition = new NamedQueryDefinitionBuilder( queryAnn.name() )
-               .setLockOptions( determineLockOptions( queryAnn, hints ) )
+               .setLockOptions( hints.determineLockOptions( queryAnn ) )
                .setQuery( queryName )
-               .setCacheable( getBoolean( queryName, "org.hibernate.cacheable", hints ) )
-               .setCacheRegion( getString( queryName, "org.hibernate.cacheRegion", hints ) )
-               .setTimeout( getTimeout( queryName, hints ) )
-               .setFetchSize( getInteger( queryName, "org.hibernate.fetchSize", hints ) )
-               .setFlushMode( getFlushMode( queryName, hints ) )
-               .setCacheMode( getCacheMode( queryName, hints ) )
-               .setReadOnly( getBoolean( queryName, "org.hibernate.readOnly", hints ) )
-               .setComment( getString( queryName, "org.hibernate.comment", hints ) )
+               .setCacheable( hints.getBoolean( queryName, QueryHints.CACHEABLE ) )
+               .setCacheRegion( hints.getString( queryName, QueryHints.CACHE_REGION ) )
+               .setTimeout( hints.getTimeout( queryName ) )
+               .setFetchSize( hints.getInteger( queryName, QueryHints.FETCH_SIZE ) )
+               .setFlushMode( hints.getFlushMode( queryName ) )
+               .setCacheMode( hints.getCacheMode( queryName ) )
+               .setReadOnly( hints.getBoolean( queryName, QueryHints.READ_ONLY ) )
+               .setComment( hints.getString( queryName, QueryHints.COMMENT ) )
                .setParameterTypes( null )
                .createNamedQueryDefinition();

@@ -97,17 +94,7 @@ public abstract class QueryBinder {
        }
    }

-   private static LockOptions determineLockOptions(NamedQuery namedQueryAnnotation, QueryHint[] hints) {
-       LockModeType lockModeType = namedQueryAnnotation.lockMode();
-       Integer lockTimeoutHint = getInteger( namedQueryAnnotation.name(), "javax.persistence.lock.timeout", hints );
-
-       LockOptions lockOptions = new LockOptions( LockModeConverter.convertToLockMode( lockModeType ) );
-       if ( lockTimeoutHint != null ) {
-           lockOptions.setTimeOut( lockTimeoutHint );
-       }
-
-       return lockOptions;
-   }
-
    public static void bindNativeQuery(NamedNativeQuery queryAnn, Mappings mappings, boolean isDefault) {
        if ( queryAnn == null ) return;
@@ -116,22 +103,22 @@ public abstract class QueryBinder {
            throw new AnnotationException( "A named query must have a name when used in class or package level" );
        }
        String resultSetMapping = queryAnn.resultSetMapping();
-       QueryHint[] hints = queryAnn.hints();
+       QueryHintDefinition hints = new QueryHintDefinition( queryAnn.hints() );
        String queryName = queryAnn.query();

        NamedSQLQueryDefinitionBuilder builder = new NamedSQLQueryDefinitionBuilder( queryAnn.name() )
                .setQuery( queryName )
                .setQuerySpaces( null )
-               .setCacheable( getBoolean( queryName, "org.hibernate.cacheable", hints ) )
-               .setCacheRegion( getString( queryName, "org.hibernate.cacheRegion", hints ) )
-               .setTimeout( getTimeout( queryName, hints ) )
-               .setFetchSize( getInteger( queryName, "org.hibernate.fetchSize", hints ) )
-               .setFlushMode( getFlushMode( queryName, hints ) )
-               .setCacheMode( getCacheMode( queryName, hints ) )
-               .setReadOnly( getBoolean( queryName, "org.hibernate.readOnly", hints ) )
-               .setComment( getString( queryName, "org.hibernate.comment", hints ) )
+               .setCacheable( hints.getBoolean( queryName, QueryHints.CACHEABLE ) )
+               .setCacheRegion( hints.getString( queryName, QueryHints.CACHE_REGION ) )
+               .setTimeout( hints.getTimeout( queryName ) )
+               .setFetchSize( hints.getInteger( queryName, QueryHints.FETCH_SIZE ) )
+               .setFlushMode( hints.getFlushMode( queryName ) )
+               .setCacheMode( hints.getCacheMode( queryName ) )
+               .setReadOnly( hints.getBoolean( queryName, QueryHints.READ_ONLY ) )
+               .setComment( hints.getString( queryName, QueryHints.COMMENT ) )
                .setParameterTypes( null )
-               .setCallable( getBoolean( queryName, "org.hibernate.callable", hints ) );
+               .setCallable( hints.getBoolean( queryName, QueryHints.CALLABLE ) );

        if ( !BinderHelper.isEmptyAnnotationValue( resultSetMapping ) ) {
            //sql result set usage
@@ -332,7 +319,7 @@ public abstract class QueryBinder {
        }
    }

-   public static void bindNamedStoredProcedureQuery(NamedStoredProcedureQuery annotation, Mappings mappings) {
+   public static void bindNamedStoredProcedureQuery(NamedStoredProcedureQuery annotation, Mappings mappings, boolean isDefault) {
        if ( annotation == null ) {
            return;
        }
@@ -342,7 +329,12 @@ public abstract class QueryBinder {
        }

        final NamedProcedureCallDefinition def = new NamedProcedureCallDefinition( annotation );
-       mappings.addNamedProcedureCallDefinition( def );
+
+       if(isDefault){
+           mappings.addDefaultNamedProcedureCallDefinition( def );
+       } else{
+           mappings.addNamedProcedureCallDefinition( def );
+       }
        LOG.debugf( "Bound named stored procedure query : %s => %s", def.getRegisteredName(), def.getProcedureName() );
    }

@@ -359,109 +351,5 @@ public abstract class QueryBinder {
        mappings.addSecondPass( new ResultsetMappingSecondPass( ann, mappings, isDefault ) );
    }

-   private static CacheMode getCacheMode(String query, QueryHint[] hints) {
-       for (QueryHint hint : hints) {
-           if ( "org.hibernate.cacheMode".equals( hint.name() ) ) {
-               if ( hint.value().equalsIgnoreCase( CacheMode.GET.toString() ) ) {
-                   return CacheMode.GET;
-               }
-               else if ( hint.value().equalsIgnoreCase( CacheMode.IGNORE.toString() ) ) {
-                   return CacheMode.IGNORE;
-               }
-               else if ( hint.value().equalsIgnoreCase( CacheMode.NORMAL.toString() ) ) {
-                   return CacheMode.NORMAL;
-               }
-               else if ( hint.value().equalsIgnoreCase( CacheMode.PUT.toString() ) ) {
-                   return CacheMode.PUT;
-               }
-               else if ( hint.value().equalsIgnoreCase( CacheMode.REFRESH.toString() ) ) {
-                   return CacheMode.REFRESH;
-               }
-               else {
-                   throw new AnnotationException( "Unknown CacheMode in hint: " + query + ":" + hint.name() );
-               }
-           }
-       }
-       return null;
-   }
-
-   private static FlushMode getFlushMode(String query, QueryHint[] hints) {
-       for (QueryHint hint : hints) {
-           if ( "org.hibernate.flushMode".equals( hint.name() ) ) {
-               if ( hint.value().equalsIgnoreCase( FlushMode.ALWAYS.toString() ) ) {
-                   return FlushMode.ALWAYS;
-               }
-               else if ( hint.value().equalsIgnoreCase( FlushMode.AUTO.toString() ) ) {
-                   return FlushMode.AUTO;
-               }
-               else if ( hint.value().equalsIgnoreCase( FlushMode.COMMIT.toString() ) ) {
-                   return FlushMode.COMMIT;
-               }
-               else if ( hint.value().equalsIgnoreCase( FlushMode.NEVER.toString() ) ) {
-                   return FlushMode.MANUAL;
-               }
-               else if ( hint.value().equalsIgnoreCase( FlushMode.MANUAL.toString() ) ) {
-                   return FlushMode.MANUAL;
-               }
-               else {
-                   throw new AnnotationException( "Unknown FlushMode in hint: " + query + ":" + hint.name() );
-               }
-           }
-       }
-       return null;
-   }
-
-   private static boolean getBoolean(String query, String hintName, QueryHint[] hints) {
-       for (QueryHint hint : hints) {
-           if ( hintName.equals( hint.name() ) ) {
-               if ( hint.value().equalsIgnoreCase( "true" ) ) {
-                   return true;
-               }
-               else if ( hint.value().equalsIgnoreCase( "false" ) ) {
-                   return false;
-               }
-               else {
-                   throw new AnnotationException( "Not a boolean in hint: " + query + ":" + hint.name() );
-               }
-           }
-       }
-       return false;
-   }
-
-   private static String getString(String query, String hintName, QueryHint[] hints) {
-       for (QueryHint hint : hints) {
-           if ( hintName.equals( hint.name() ) ) {
-               return hint.value();
-           }
-       }
-       return null;
-   }
-
-   private static Integer getInteger(String query, String hintName, QueryHint[] hints) {
-       for (QueryHint hint : hints) {
-           if ( hintName.equals( hint.name() ) ) {
-               try {
-                   return Integer.decode( hint.value() );
-               }
-               catch (NumberFormatException nfe) {
-                   throw new AnnotationException( "Not an integer in hint: " + query + ":" + hint.name(), nfe );
-               }
-           }
-       }
-       return null;
-   }
-
-   private static Integer getTimeout(String queryName, QueryHint[] hints) {
-       Integer timeout = getInteger( queryName, "javax.persistence.query.timeout", hints );
-
-       if ( timeout != null ) {
-           // convert milliseconds to seconds
-           timeout = (int)Math.round(timeout.doubleValue() / 1000.0 );
-       }
-       else {
-           // timeout is already in seconds
-           timeout = getInteger( queryName, "org.hibernate.timeout", hints );
-       }
-       return timeout;
-   }
}
@ -0,0 +1,152 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2013, Red Hat Inc. or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors.  All third-party contributions are
 * distributed under license by Red Hat Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA  02110-1301  USA
 */
package org.hibernate.cfg.annotations;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.LockModeType;
import javax.persistence.NamedQuery;
import javax.persistence.QueryHint;

import org.hibernate.AnnotationException;
import org.hibernate.CacheMode;
import org.hibernate.FlushMode;
import org.hibernate.LockOptions;
import org.hibernate.MappingException;
import org.hibernate.annotations.QueryHints;
import org.hibernate.internal.util.LockModeConverter;

/**
 * @author Strong Liu <stliu@hibernate.org>
 */
public class QueryHintDefinition {
	private final Map<String, Object> hintsMap;

	public QueryHintDefinition(final QueryHint[] hints) {
		if ( hints == null || hints.length == 0 ) {
			hintsMap = Collections.emptyMap();
		}
		else {
			final Map<String, Object> hintsMap = new HashMap<String, Object>();
			for ( QueryHint hint : hints ) {
				hintsMap.put( hint.name(), hint.value() );
			}
			this.hintsMap = hintsMap;
		}
	}

	public CacheMode getCacheMode(String query) {
		String hintName = QueryHints.CACHE_MODE;
		String value = (String) hintsMap.get( hintName );
		if ( value == null ) {
			return null;
		}
		try {
			return CacheMode.interpretExternalSetting( value );
		}
		catch ( MappingException e ) {
			throw new AnnotationException( "Unknown CacheMode in hint: " + query + ":" + hintName, e );
		}
	}

	public FlushMode getFlushMode(String query) {
		String hintName = QueryHints.FLUSH_MODE;
		String value = (String) hintsMap.get( hintName );
		if ( value == null ) {
			return null;
		}
		try {
			return FlushMode.interpretExternalSetting( value );
		}
		catch ( MappingException e ) {
			throw new AnnotationException( "Unknown FlushMode in hint: " + query + ":" + hintName, e );
		}
	}

	public boolean getBoolean(String query, String hintName) {
		String value = (String) hintsMap.get( hintName );
		if ( value == null ) {
			return false;
		}
		if ( value.equalsIgnoreCase( "true" ) ) {
			return true;
		}
		else if ( value.equalsIgnoreCase( "false" ) ) {
			return false;
		}
		else {
			throw new AnnotationException( "Not a boolean in hint: " + query + ":" + hintName );
		}
	}

	public String getString(String query, String hintName) {
		return (String) hintsMap.get( hintName );
	}

	public Integer getInteger(String query, String hintName) {
		String value = (String) hintsMap.get( hintName );
		if ( value == null ) {
			return null;
		}
		try {
			return Integer.decode( value );
		}
		catch ( NumberFormatException nfe ) {
			throw new AnnotationException( "Not an integer in hint: " + query + ":" + hintName, nfe );
		}
	}

	public Integer getTimeout(String queryName) {
		Integer timeout = getInteger( queryName, QueryHints.TIMEOUT_JPA );

		if ( timeout != null ) {
			// convert milliseconds to seconds
			timeout = (int) Math.round( timeout.doubleValue() / 1000.0 );
		}
		else {
			// timeout is already in seconds
			timeout = getInteger( queryName, QueryHints.TIMEOUT_HIBERNATE );
		}
		return timeout;
	}

	public LockOptions determineLockOptions(NamedQuery namedQueryAnnotation) {
		LockModeType lockModeType = namedQueryAnnotation.lockMode();
		Integer lockTimeoutHint = getInteger( namedQueryAnnotation.name(), "javax.persistence.lock.timeout" );

		LockOptions lockOptions = new LockOptions( LockModeConverter.convertToLockMode( lockModeType ) );
		if ( lockTimeoutHint != null ) {
			lockOptions.setTimeOut( lockTimeoutHint );
		}

		return lockOptions;
	}

	public Map<String, Object> getHintsMap() {
		return hintsMap;
	}
}
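// Illustration only, not part of this commit: a minimal usage sketch of the new QueryHintDefinition
// shown above. The annotated class, query name, and hint values below are hypothetical; only the
// public constructor and accessors defined above are exercised.
import javax.persistence.NamedQuery;
import javax.persistence.QueryHint;

import org.hibernate.CacheMode;
import org.hibernate.cfg.annotations.QueryHintDefinition;

@NamedQuery(
		name = "User.byStatus",
		query = "from User u where u.status = :status",
		hints = {
				@QueryHint( name = "org.hibernate.cacheMode", value = "NORMAL" ),
				@QueryHint( name = "javax.persistence.query.timeout", value = "5000" )
		}
)
class QueryHintDefinitionExample {
	public static void main(String[] args) {
		NamedQuery namedQuery = QueryHintDefinitionExample.class.getAnnotation( NamedQuery.class );
		QueryHintDefinition hints = new QueryHintDefinition( namedQuery.hints() );

		CacheMode cacheMode = hints.getCacheMode( namedQuery.name() );    // CacheMode.NORMAL
		Integer timeoutSeconds = hints.getTimeout( namedQuery.name() );   // 5000 ms rounded to 5 seconds
		System.out.println( cacheMode + " / " + timeoutSeconds );
	}
}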
@ -33,6 +33,7 @@ import java.util.Map;
import javax.persistence.EntityListeners;
import javax.persistence.NamedNativeQuery;
import javax.persistence.NamedQuery;
import javax.persistence.NamedStoredProcedureQuery;
import javax.persistence.SequenceGenerator;
import javax.persistence.SqlResultSetMapping;
import javax.persistence.TableGenerator;

@ -49,6 +50,7 @@ import org.hibernate.internal.util.ReflectHelper;
 *
 * @author Emmanuel Bernard
 */
@SuppressWarnings("unchecked")
public class JPAMetadataProvider implements MetadataProvider, Serializable {
	private transient MetadataProvider delegate = new JavaMetadataProvider();
	private transient Map<Object, Object> defaults;

@ -152,6 +154,16 @@ public class JPAMetadataProvider implements MetadataProvider, Serializable {
							element, xmlDefaults
					);
					sqlResultSetMappings.addAll( currentSqlResultSetMappings );

					List<NamedStoredProcedureQuery> namedStoredProcedureQueries = (List<NamedStoredProcedureQuery>) defaults.get( NamedStoredProcedureQuery.class );
					if ( namedStoredProcedureQueries == null ) {
						namedStoredProcedureQueries = new ArrayList<NamedStoredProcedureQuery>();
						defaults.put( NamedStoredProcedureQuery.class, namedStoredProcedureQueries );
					}
					List<NamedStoredProcedureQuery> currentNamedStoredProcedureQueries = JPAOverriddenAnnotationReader.buildNamedStoreProcedureQueries(
							element, xmlDefaults
					);
					namedStoredProcedureQueries.addAll( currentNamedStoredProcedureQueries );
				}
			}
			return defaults;
@ -88,14 +88,21 @@ import javax.persistence.MapKeyJoinColumns;
import javax.persistence.MapKeyTemporal;
import javax.persistence.MappedSuperclass;
import javax.persistence.MapsId;
import javax.persistence.NamedAttributeNode;
import javax.persistence.NamedEntityGraph;
import javax.persistence.NamedEntityGraphs;
import javax.persistence.NamedNativeQueries;
import javax.persistence.NamedNativeQuery;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.NamedStoredProcedureQueries;
import javax.persistence.NamedStoredProcedureQuery;
import javax.persistence.NamedSubgraph;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.OrderBy;
import javax.persistence.OrderColumn;
import javax.persistence.ParameterMode;
import javax.persistence.PostLoad;
import javax.persistence.PostPersist;
import javax.persistence.PostRemove;

@ -111,6 +118,7 @@ import javax.persistence.SecondaryTables;
import javax.persistence.SequenceGenerator;
import javax.persistence.SqlResultSetMapping;
import javax.persistence.SqlResultSetMappings;
import javax.persistence.StoredProcedureParameter;
import javax.persistence.Table;
import javax.persistence.TableGenerator;
import javax.persistence.Temporal;

@ -176,10 +184,14 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
		annotationToXml.put( DiscriminatorColumn.class, "discriminator-column" );
		annotationToXml.put( SequenceGenerator.class, "sequence-generator" );
		annotationToXml.put( TableGenerator.class, "table-generator" );
		annotationToXml.put( NamedEntityGraph.class, "named-entity-graph" );
		annotationToXml.put( NamedEntityGraphs.class, "named-entity-graph" );
		annotationToXml.put( NamedQuery.class, "named-query" );
		annotationToXml.put( NamedQueries.class, "named-query" );
		annotationToXml.put( NamedNativeQuery.class, "named-native-query" );
		annotationToXml.put( NamedNativeQueries.class, "named-native-query" );
		annotationToXml.put( NamedStoredProcedureQuery.class, "named-stored-procedure-query" );
		annotationToXml.put( NamedStoredProcedureQueries.class, "named-stored-procedure-query" );
		annotationToXml.put( SqlResultSetMapping.class, "sql-result-set-mapping" );
		annotationToXml.put( SqlResultSetMappings.class, "sql-result-set-mapping" );
		annotationToXml.put( ExcludeDefaultListeners.class, "exclude-default-listeners" );

@ -357,6 +369,8 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
		addIfNotNull( annotationList, getTableGenerator( tree, defaults ) );
		addIfNotNull( annotationList, getNamedQueries( tree, defaults ) );
		addIfNotNull( annotationList, getNamedNativeQueries( tree, defaults ) );
		addIfNotNull( annotationList, getNamedStoredProcedureQueries( tree, defaults ) );
		addIfNotNull( annotationList, getNamedEntityGraphs( tree, defaults ) );
		addIfNotNull( annotationList, getSqlResultSetMappings( tree, defaults ) );
		addIfNotNull( annotationList, getExcludeDefaultListeners( tree, defaults ) );
		addIfNotNull( annotationList, getExcludeSuperclassListeners( tree, defaults ) );
@ -1743,6 +1757,142 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
		}
	}

	public static List<NamedEntityGraph> buildNamedEntityGraph(Element element, XMLContext.Default defaults) {
		if ( element == null ) {
			return new ArrayList<NamedEntityGraph>();
		}
		List<NamedEntityGraph> namedEntityGraphList = new ArrayList<NamedEntityGraph>();
		List<Element> namedEntityGraphElements = element.elements( "named-entity-graph" );
		for ( Element subElement : namedEntityGraphElements ) {
			AnnotationDescriptor ann = new AnnotationDescriptor( NamedEntityGraph.class );
			copyStringAttribute( ann, subElement, "name", false );
			copyBooleanAttribute( ann, subElement, "include-all-attributes" );
			bindNamedAttributeNodes( subElement, ann );

			List<Element> subgraphNodes = subElement.elements( "subgraph" );
			bindNamedSubgraph( defaults, ann, subgraphNodes );
			List<Element> subclassSubgraphNodes = subElement.elements( "subclass-subgraph" );
			bindNamedSubgraph( defaults, ann, subclassSubgraphNodes );
			namedEntityGraphList.add( (NamedEntityGraph) AnnotationFactory.create( ann ) );
		}
		//TODO
		return namedEntityGraphList;
	}

	private static void bindNamedSubgraph(XMLContext.Default defaults, AnnotationDescriptor ann, List<Element> subgraphNodes) {
		List<NamedSubgraph> annSubgraphNodes = new ArrayList<NamedSubgraph>();
		for ( Element subgraphNode : subgraphNodes ) {
			AnnotationDescriptor annSubgraphNode = new AnnotationDescriptor( NamedSubgraph.class );
			copyStringAttribute( annSubgraphNode, subgraphNode, "name", true );
			String clazzName = subgraphNode.attributeValue( "class" );
			Class clazz;
			try {
				clazz = ReflectHelper.classForName(
						XMLContext.buildSafeClassName( clazzName, defaults ),
						JPAOverriddenAnnotationReader.class
				);
			}
			catch ( ClassNotFoundException e ) {
				throw new AnnotationException( "Unable to find entity-class: " + clazzName, e );
			}
			annSubgraphNode.setValue( "type", clazz );
			bindNamedAttributeNodes( subgraphNode, annSubgraphNode );
			annSubgraphNodes.add( (NamedSubgraph) AnnotationFactory.create( annSubgraphNode ) );
		}
		ann.setValue( "subgraphs", annSubgraphNodes.toArray( new NamedSubgraph[annSubgraphNodes.size()] ) );
	}

	private static void bindNamedAttributeNodes(Element subElement, AnnotationDescriptor ann) {
		List<Element> namedAttributeNodes = subElement.elements( "named-attribute-node" );
		List<NamedAttributeNode> annNamedAttributeNodes = new ArrayList<NamedAttributeNode>();
		for ( Element namedAttributeNode : namedAttributeNodes ) {
			AnnotationDescriptor annNamedAttributeNode = new AnnotationDescriptor( NamedAttributeNode.class );
			copyStringAttribute( annNamedAttributeNode, namedAttributeNode, "value", true );
			copyStringAttribute( annNamedAttributeNode, namedAttributeNode, "subgraph", false );
			copyStringAttribute( annNamedAttributeNode, namedAttributeNode, "key-subgraph", false );
			annNamedAttributeNodes.add( (NamedAttributeNode) AnnotationFactory.create( annNamedAttributeNode ) );
		}
		ann.setValue( "attributeNodes", annNamedAttributeNodes.toArray( new NamedAttributeNode[annNamedAttributeNodes.size()] ) );
	}
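// Illustration only, not part of this commit: buildNamedEntityGraph()/bindNamedAttributeNodes()
// above synthesize the JPA 2.1 annotations that correspond to <named-entity-graph> XML (subgraph
// elements map to @NamedSubgraph the same way). A minimal sketch of that annotation form follows;
// the entity and attribute names are hypothetical.
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.NamedAttributeNode;
import javax.persistence.NamedEntityGraph;

@Entity
@NamedEntityGraph(
		name = "person-with-address",
		attributeNodes = @NamedAttributeNode( "address" )
)
public class Person {
	@Id
	private Long id;

	@ManyToOne
	private Address address;
}

@Entity
class Address {
	@Id
	private Long id;
}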
	public static List<NamedStoredProcedureQuery> buildNamedStoreProcedureQueries(Element element, XMLContext.Default defaults) {
		if ( element == null ) {
			return new ArrayList<NamedStoredProcedureQuery>();
		}
		List namedStoredProcedureElements = element.elements( "named-stored-procedure-query" );
		List<NamedStoredProcedureQuery> namedStoredProcedureQueries = new ArrayList<NamedStoredProcedureQuery>();
		for ( Object obj : namedStoredProcedureElements ) {
			Element subElement = (Element) obj;
			AnnotationDescriptor ann = new AnnotationDescriptor( NamedStoredProcedureQuery.class );
			copyStringAttribute( ann, subElement, "name", true );
			copyStringAttribute( ann, subElement, "procedure-name", true );

			List<Element> elements = subElement.elements( "parameter" );
			List<StoredProcedureParameter> storedProcedureParameters = new ArrayList<StoredProcedureParameter>();

			for ( Element parameterElement : elements ) {
				AnnotationDescriptor parameterDescriptor = new AnnotationDescriptor( StoredProcedureParameter.class );
				copyStringAttribute( parameterDescriptor, parameterElement, "name", false );
				String modeValue = parameterElement.attributeValue( "mode" );
				if ( modeValue == null ) {
					parameterDescriptor.setValue( "mode", ParameterMode.IN );
				}
				else {
					parameterDescriptor.setValue( "mode", ParameterMode.valueOf( modeValue.toUpperCase() ) );
				}
				String clazzName = parameterElement.attributeValue( "class" );
				Class clazz;
				try {
					clazz = ReflectHelper.classForName(
							XMLContext.buildSafeClassName( clazzName, defaults ),
							JPAOverriddenAnnotationReader.class
					);
				}
				catch ( ClassNotFoundException e ) {
					throw new AnnotationException( "Unable to find entity-class: " + clazzName, e );
				}
				parameterDescriptor.setValue( "type", clazz );
				storedProcedureParameters.add( (StoredProcedureParameter) AnnotationFactory.create( parameterDescriptor ) );
			}

			ann.setValue(
					"parameters",
					storedProcedureParameters.toArray( new StoredProcedureParameter[storedProcedureParameters.size()] )
			);

			elements = subElement.elements( "result-class" );
			List<Class> returnClasses = new ArrayList<Class>();
			for ( Element classElement : elements ) {
				String clazzName = classElement.getTextTrim();
				Class clazz;
				try {
					clazz = ReflectHelper.classForName(
							XMLContext.buildSafeClassName( clazzName, defaults ),
							JPAOverriddenAnnotationReader.class
					);
				}
				catch ( ClassNotFoundException e ) {
					throw new AnnotationException( "Unable to find entity-class: " + clazzName, e );
				}
				returnClasses.add( clazz );
			}
			ann.setValue( "resultClasses", returnClasses.toArray( new Class[returnClasses.size()] ) );

			elements = subElement.elements( "result-set-mapping" );
			List<String> resultSetMappings = new ArrayList<String>();
			for ( Element resultSetMappingElement : elements ) {
				resultSetMappings.add( resultSetMappingElement.getTextTrim() );
			}
			ann.setValue( "resultSetMappings", resultSetMappings.toArray( new String[resultSetMappings.size()] ) );
			elements = subElement.elements( "hint" );
			buildQueryHints( elements, ann );
			namedStoredProcedureQueries.add( (NamedStoredProcedureQuery) AnnotationFactory.create( ann ) );
		}
		return namedStoredProcedureQueries;
	}
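// Illustration only, not part of this commit: the <named-stored-procedure-query> binding above
// mirrors the JPA 2.1 @NamedStoredProcedureQuery annotation. A minimal sketch of that form, with
// a hypothetical procedure, parameter, and entity:
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedStoredProcedureQuery;
import javax.persistence.ParameterMode;
import javax.persistence.StoredProcedureParameter;

@Entity
@NamedStoredProcedureQuery(
		name = "User.findByStatus",
		procedureName = "FIND_USERS_BY_STATUS",
		parameters = @StoredProcedureParameter( name = "status", mode = ParameterMode.IN, type = String.class ),
		resultClasses = User.class
)
public class User {
	@Id
	private Long id;

	private String status;
}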
	public static List<SqlResultSetMapping> buildSqlResultsetMappings(Element element, XMLContext.Default defaults) {
		if ( element == null ) {
			return new ArrayList<SqlResultSetMapping>();
@ -1872,6 +2022,84 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
		}
	}

	private NamedEntityGraphs getNamedEntityGraphs(Element tree, XMLContext.Default defaults) {
		List<NamedEntityGraph> queries = buildNamedEntityGraph( tree, defaults );
		if ( defaults.canUseJavaAnnotations() ) {
			NamedEntityGraph annotation = getJavaAnnotation( NamedEntityGraph.class );
			addNamedEntityGraphIfNeeded( annotation, queries );
			NamedEntityGraphs annotations = getJavaAnnotation( NamedEntityGraphs.class );
			if ( annotations != null ) {
				for ( NamedEntityGraph current : annotations.value() ) {
					addNamedEntityGraphIfNeeded( current, queries );
				}
			}
		}
		if ( queries.size() > 0 ) {
			AnnotationDescriptor ad = new AnnotationDescriptor( NamedEntityGraphs.class );
			ad.setValue( "value", queries.toArray( new NamedEntityGraph[queries.size()] ) );
			return AnnotationFactory.create( ad );
		}
		else {
			return null;
		}
	}

	private void addNamedEntityGraphIfNeeded(NamedEntityGraph annotation, List<NamedEntityGraph> queries) {
		if ( annotation != null ) {
			String queryName = annotation.name();
			boolean present = false;
			for ( NamedEntityGraph current : queries ) {
				if ( current.name().equals( queryName ) ) {
					present = true;
					break;
				}
			}
			if ( !present ) {
				queries.add( annotation );
			}
		}
	}

	private NamedStoredProcedureQueries getNamedStoredProcedureQueries(Element tree, XMLContext.Default defaults) {
		List<NamedStoredProcedureQuery> queries = buildNamedStoreProcedureQueries( tree, defaults );
		if ( defaults.canUseJavaAnnotations() ) {
			NamedStoredProcedureQuery annotation = getJavaAnnotation( NamedStoredProcedureQuery.class );
			addNamedStoredProcedureQueryIfNeeded( annotation, queries );
			NamedStoredProcedureQueries annotations = getJavaAnnotation( NamedStoredProcedureQueries.class );
			if ( annotations != null ) {
				for ( NamedStoredProcedureQuery current : annotations.value() ) {
					addNamedStoredProcedureQueryIfNeeded( current, queries );
				}
			}
		}
		if ( queries.size() > 0 ) {
			AnnotationDescriptor ad = new AnnotationDescriptor( NamedStoredProcedureQueries.class );
			ad.setValue( "value", queries.toArray( new NamedStoredProcedureQuery[queries.size()] ) );
			return AnnotationFactory.create( ad );
		}
		else {
			return null;
		}
	}

	private void addNamedStoredProcedureQueryIfNeeded(NamedStoredProcedureQuery annotation, List<NamedStoredProcedureQuery> queries) {
		if ( annotation != null ) {
			String queryName = annotation.name();
			boolean present = false;
			for ( NamedStoredProcedureQuery current : queries ) {
				if ( current.name().equals( queryName ) ) {
					present = true;
					break;
				}
			}
			if ( !present ) {
				queries.add( annotation );
			}
		}
	}

	private NamedNativeQueries getNamedNativeQueries(Element tree, XMLContext.Default defaults) {
		List<NamedNativeQuery> queries = (List<NamedNativeQuery>) buildNamedQueries( tree, true, defaults );
		if ( defaults.canUseJavaAnnotations() ) {
@ -1910,6 +2138,25 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
		}
	}

	private static void buildQueryHints(List<Element> elements, AnnotationDescriptor ann) {
		List<QueryHint> queryHints = new ArrayList<QueryHint>( elements.size() );
		for ( Element hint : elements ) {
			AnnotationDescriptor hintDescriptor = new AnnotationDescriptor( QueryHint.class );
			String value = hint.attributeValue( "name" );
			if ( value == null ) {
				throw new AnnotationException( "<hint> without name. " + SCHEMA_VALIDATION );
			}
			hintDescriptor.setValue( "name", value );
			value = hint.attributeValue( "value" );
			if ( value == null ) {
				throw new AnnotationException( "<hint> without value. " + SCHEMA_VALIDATION );
			}
			hintDescriptor.setValue( "value", value );
			queryHints.add( (QueryHint) AnnotationFactory.create( hintDescriptor ) );
		}
		ann.setValue( "hints", queryHints.toArray( new QueryHint[queryHints.size()] ) );
	}

	public static List buildNamedQueries(Element element, boolean isNative, XMLContext.Default defaults) {
		if ( element == null ) {
			return new ArrayList();

@ -1931,22 +2178,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
			}
			copyStringElement( queryElt, ann, "query" );
			List<Element> elements = subelement.elements( "hint" );
			List<QueryHint> queryHints = new ArrayList<QueryHint>( elements.size() );
			buildQueryHints( elements, ann );
			for ( Element hint : elements ) {
				AnnotationDescriptor hintDescriptor = new AnnotationDescriptor( QueryHint.class );
				String value = hint.attributeValue( "name" );
				if ( value == null ) {
					throw new AnnotationException( "<hint> without name. " + SCHEMA_VALIDATION );
				}
				hintDescriptor.setValue( "name", value );
				value = hint.attributeValue( "value" );
				if ( value == null ) {
					throw new AnnotationException( "<hint> without value. " + SCHEMA_VALIDATION );
				}
				hintDescriptor.setValue( "value", value );
				queryHints.add( (QueryHint) AnnotationFactory.create( hintDescriptor ) );
			}
			ann.setValue( "hints", queryHints.toArray( new QueryHint[queryHints.size()] ) );
			String clazzName = subelement.attributeValue( "result-class" );
			if ( StringHelper.isNotEmpty( clazzName ) ) {
				Class clazz;
@ -178,7 +178,7 @@ abstract class AbstractTransactSQLDialect extends Dialect {
	}

	@Override
	public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map keyColumnNames) {
	public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map<String, String[]> keyColumnNames) {
		// TODO:  merge additional lockoptions support in Dialect.applyLocksToSql
		final Iterator itr = aliasedLockOptions.getAliasLockIterator();
		final StringBuilder buffer = new StringBuilder( sql );

@ -1468,7 +1468,7 @@ public abstract class Dialect implements ConversionContext {
	 * @param keyColumnNames a map of key columns indexed by aliased table names.
	 * @return the modified SQL string.
	 */
	public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map keyColumnNames) {
	public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map<String, String[]> keyColumnNames) {
		return sql + new ForUpdateFragment( this, aliasedLockOptions, keyColumnNames ).toFragmentString();
	}

@ -96,7 +96,7 @@ public class SybaseASE157Dialect extends SybaseASE15Dialect {
	}

	@Override
	public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map keyColumnNames) {
	public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map<String, String[]> keyColumnNames) {
		return sql + new ForUpdateFragment( this, aliasedLockOptions, keyColumnNames ).toFragmentString();
	}

@ -186,8 +186,10 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
				// Inserting alias. It is unlikely that we would have to add alias, but just in case.
				alias = StringHelper.generateAlias( "page", unique );
				sb.insert( nextComa, " as " + alias );
				int aliasExprLength = ( " as " + alias ).length();
				++unique;
				nextComa += ( " as " + alias ).length();
				nextComa += aliasExprLength;
				endPos += aliasExprLength;
			}
			aliases.add( alias );
		}
@ -36,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap;
public class ColumnNameCache {
	private static final float LOAD_FACTOR = .75f;

	private final Map<String, Integer> columnNameToIndexCache;
	private final ConcurrentHashMap<String, Integer> columnNameToIndexCache;

	/**
	 * Constructs a ColumnNameCache
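// Illustration only, not part of this commit: the field above is narrowed from Map to
// ConcurrentHashMap, presumably so the cache can rely on atomic operations such as putIfAbsent
// without external locking. The class and method names below are made up to show that pattern in
// isolation; this is not ColumnNameCache's actual API.
import java.util.concurrent.ConcurrentHashMap;

class NameIndexCacheSketch {
	private final ConcurrentHashMap<String, Integer> cache = new ConcurrentHashMap<String, Integer>( 16, .75f, 1 );

	int indexFor(String columnName) {
		Integer cached = cache.get( columnName );
		if ( cached == null ) {
			// racing threads may both compute the value, but putIfAbsent keeps exactly one entry
			Integer resolved = expensiveResolve( columnName );
			Integer previous = cache.putIfAbsent( columnName, resolved );
			cached = ( previous != null ) ? previous : resolved;
		}
		return cached;
	}

	private Integer expensiveResolve(String columnName) {
		// stand-in for the real lookup (e.g. ResultSet.findColumn)
		return columnName.length();
	}
}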
@ -45,6 +45,7 @@ import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;

@ -60,12 +61,11 @@ import org.hibernate.pretty.MessageHelper;
 * @author Steve Ebersole
 */
public class CollectionLoadContext {
	private static final CoreMessageLogger LOG = CoreLogging.messageLogger( CollectionLoadContext.class );
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, CollectionLoadContext.class.getName());

	private final LoadContexts loadContexts;
	private final ResultSet resultSet;
	private Set localLoadingCollectionKeys = new HashSet();
	private Set<CollectionKey> localLoadingCollectionKeys = new HashSet<CollectionKey>();

	/**
	 * Creates a collection load context for the given result set.

@ -122,12 +122,13 @@ public class CollectionLoadContext {
		if ( collection != null ) {
			if ( collection.wasInitialized() ) {
				LOG.trace( "Collection already initialized; ignoring" );
				return null; // ignore this row of results! Note the early exit
				// ignore this row of results! Note the early exit
				return null;
			}
			LOG.trace( "Collection not yet initialized; initializing" );
		}
		else {
			Object owner = loadContexts.getPersistenceContext().getCollectionOwner( key, persister );
			final Object owner = loadContexts.getPersistenceContext().getCollectionOwner( key, persister );
			final boolean newlySavedEntity = owner != null
					&& loadContexts.getPersistenceContext().getEntry( owner ).getStatus() != Status.LOADING;
			if ( newlySavedEntity ) {

@ -165,7 +166,7 @@ public class CollectionLoadContext {
	 * @param persister The persister for which to complete loading.
	 */
	public void endLoadingCollections(CollectionPersister persister) {
		SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
		final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
		if ( !loadContexts.hasLoadingCollectionEntries()
				&& localLoadingCollectionKeys.isEmpty() ) {
			return;

@ -177,17 +178,17 @@ public class CollectionLoadContext {
		// internal loadingCollections map for matches and store those matches
		// in a temp collection.  the temp collection is then used to "drive"
		// the #endRead processing.
		List matches = null;
		List<LoadingCollectionEntry> matches = null;
		Iterator iter = localLoadingCollectionKeys.iterator();
		final Iterator itr = localLoadingCollectionKeys.iterator();
		while ( iter.hasNext() ) {
		while ( itr.hasNext() ) {
			final CollectionKey collectionKey = (CollectionKey) iter.next();
			final CollectionKey collectionKey = (CollectionKey) itr.next();
			final LoadingCollectionEntry lce = loadContexts.locateLoadingCollectionEntry( collectionKey );
			if ( lce == null ) {
				LOG.loadingCollectionKeyNotFound( collectionKey );
			}
			else if ( lce.getResultSet() == resultSet && lce.getPersister() == persister ) {
				if ( matches == null ) {
					matches = new ArrayList();
					matches = new ArrayList<LoadingCollectionEntry>();
				}
				matches.add( lce );
				if ( lce.getCollection().getOwner() == null ) {

@ -204,7 +205,7 @@ public class CollectionLoadContext {

				// todo : i'd much rather have this done from #endLoadingCollection(CollectionPersister,LoadingCollectionEntry)...
				loadContexts.unregisterLoadingCollectionXRef( collectionKey );
				iter.remove();
				itr.remove();
			}
		}

@ -220,29 +221,35 @@ public class CollectionLoadContext {
		}
	}

	private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) {
	private void endLoadingCollections(CollectionPersister persister, List<LoadingCollectionEntry> matchedCollectionEntries) {
		final boolean debugEnabled = LOG.isDebugEnabled();
		if ( matchedCollectionEntries == null ) {
			if ( debugEnabled ) LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
			if ( debugEnabled ) {
				LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
			}
			return;
		}

		final int count = matchedCollectionEntries.size();
		if ( debugEnabled ) LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
		if ( debugEnabled ) {
			LOG.debugf( "%s collections were found in result set for role: %s", count, persister.getRole() );
		}
		for ( int i = 0; i < count; i++ ) {
			LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i );
			endLoadingCollection( lce, persister );
		}
		for ( LoadingCollectionEntry matchedCollectionEntry : matchedCollectionEntries ) {
			endLoadingCollection( matchedCollectionEntry, persister );
		}

		if ( debugEnabled ) LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
		if ( debugEnabled ) {
			LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
		}
	}

	private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
		LOG.tracev( "Ending loading collection [{0}]", lce );
		final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();

		boolean hasNoQueuedAdds = lce.getCollection().endRead(); // warning: can cause a recursive calls! (proxy initialization)
		// warning: can cause a recursive calls! (proxy initialization)
		final boolean hasNoQueuedAdds = lce.getCollection().endRead();

		if ( persister.getCollectionType().hasHolder() ) {
			getLoadContext().getPersistenceContext().addCollectionHolder( lce.getCollection() );

@ -260,11 +267,14 @@ public class CollectionLoadContext {
		}

		boolean addToCache = hasNoQueuedAdds && // there were no queued additions
				persister.hasCache() && // and the role has a cache
				session.getCacheMode().isPutEnabled() &&
				!ce.isDoremove(); // and this is not a forced initialization during flush
		// add to cache if:
		boolean addToCache =
				// there were no queued additions
				hasNoQueuedAdds
				// and the role has a cache
				&& persister.hasCache()
				// and this is not a forced initialization during flush
				&& session.getCacheMode().isPutEnabled() && !ce.isDoremove();
		if ( addToCache ) {
			addCollectionToCache( lce, persister );
		}

@ -272,11 +282,11 @@ public class CollectionLoadContext {
		if ( LOG.isDebugEnabled() ) {
			LOG.debugf(
					"Collection fully initialized: %s",
					MessageHelper.collectionInfoString(persister, lce.getCollection(), lce.getKey(), session)
					MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session )
			);
		}
		if ( session.getFactory().getStatistics().isStatisticsEnabled() ) {
			session.getFactory().getStatisticsImplementor().loadCollection(persister.getRole());
			session.getFactory().getStatisticsImplementor().loadCollection( persister.getRole() );
		}
	}

@ -305,7 +315,8 @@ public class CollectionLoadContext {
			// currently this works in conjuction with the check on
			// DefaultInitializeCollectionEventHandler.initializeCollectionFromCache() (which makes sure to not read from
			// cache with enabled filters).
			return; // EARLY EXIT!!!!!
			// EARLY EXIT!!!!!
			return;
		}

		final Object version;

@ -318,7 +329,7 @@ public class CollectionLoadContext {
		// about its owner, that owner should be the same instance as associated with the PC, but we do the
		// resolution against the PC anyway just to be safe since the lookup should not be costly.
		if ( lce.getCollection() != null ) {
			Object linkedOwner = lce.getCollection().getOwner();
			final Object linkedOwner = lce.getCollection().getOwner();
			if ( linkedOwner != null ) {
				final Serializable ownerKey = persister.getOwnerEntityPersister().getIdentifier( linkedOwner, session );
				collectionOwner = getLoadContext().getPersistenceContext().getCollectionOwner( ownerKey, persister );

@ -338,11 +349,11 @@ public class CollectionLoadContext {
			version = null;
		}

		CollectionCacheEntry entry = new CollectionCacheEntry( lce.getCollection(), persister );
		final CollectionCacheEntry entry = new CollectionCacheEntry( lce.getCollection(), persister );
		CacheKey cacheKey = session.generateCacheKey( lce.getKey(), persister.getKeyType(), persister.getRole() );
		final CacheKey cacheKey = session.generateCacheKey( lce.getKey(), persister.getKeyType(), persister.getRole() );
		boolean put = persister.getCacheAccessStrategy().putFromLoad(
		final boolean put = persister.getCacheAccessStrategy().putFromLoad(
				cacheKey,
				persister.getCacheEntryStructure().structure( entry ),
				session.getTimestamp(),
				version,
				factory.getServiceRegistry().getService( RegionFactory.class ).isMinimalPutsEnabled() && session.getCacheMode()!= CacheMode.REFRESH

@ -363,7 +374,7 @@ public class CollectionLoadContext {


	@Override
	public String toString() {
		return super.toString() + "<rs=" + resultSet + ">";
	}
}
@ -41,7 +41,8 @@ public class EntityLoadContext {

	private final LoadContexts loadContexts;
	private final ResultSet resultSet;
	private final List hydratingEntities = new ArrayList( 20 ); // todo : need map? the prob is a proper key, right?
	// todo : need map? the prob is a proper key, right?
	private final List hydratingEntities = new ArrayList( 20 );

	public EntityLoadContext(LoadContexts loadContexts, ResultSet resultSet) {
		this.loadContexts = loadContexts;
@ -30,24 +30,18 @@ import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Set;

import org.jboss.logging.Logger;

import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.IdentityMap;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;

/**
 * Maps {@link ResultSet result-sets} to specific contextual data related to processing that result set
 * <p/>
 * Implementation note: internally an {@link IdentityMap} is used to maintain the mappings mainly because I'd
 * rather not be dependent upon potentially bad {@link Object#equals} and {@link Object#hashCode} implementations on
 * the JDBC result sets
 * <p/>
 * Considering the JDBC-redesign work, would further like this contextual info not mapped separately, but available
 * based on the result set being processed.  This would also allow maintaining a single mapping as we could reliably
 * get notification of the result-set closing...

@ -55,8 +49,7 @@ import org.hibernate.pretty.MessageHelper;
 * @author Steve Ebersole
 */
public class LoadContexts {
	private static final CoreMessageLogger LOG = CoreLogging.messageLogger( LoadContexts.class );
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, LoadContexts.class.getName());

	private final PersistenceContext persistenceContext;
	private Map<ResultSet,CollectionLoadContext> collectionLoadContexts;

@ -101,11 +94,11 @@ public class LoadContexts {
	 */
	public void cleanup(ResultSet resultSet) {
		if ( collectionLoadContexts != null ) {
			CollectionLoadContext collectionLoadContext = collectionLoadContexts.remove( resultSet );
			final CollectionLoadContext collectionLoadContext = collectionLoadContexts.remove( resultSet );
			collectionLoadContext.cleanup();
		}
		if ( entityLoadContexts != null ) {
			EntityLoadContext entityLoadContext = entityLoadContexts.remove( resultSet );
			final EntityLoadContext entityLoadContext = entityLoadContexts.remove( resultSet );
			entityLoadContext.cleanup();
		}
	}

@ -191,7 +184,7 @@ public class LoadContexts {
	 * @return The loading collection, or null if not found.
	 */
	public PersistentCollection locateLoadingCollection(CollectionPersister persister, Serializable ownerKey) {
		LoadingCollectionEntry lce = locateLoadingCollectionEntry( new CollectionKey( persister, ownerKey ) );
		final LoadingCollectionEntry lce = locateLoadingCollectionEntry( new CollectionKey( persister, ownerKey ) );
		if ( lce != null ) {
			if ( LOG.isTraceEnabled() ) {
				LOG.tracef(

@ -246,13 +239,13 @@ public class LoadContexts {
		if ( !hasRegisteredLoadingCollectionEntries() ) {
			return;
		}
		xrefLoadingCollectionEntries.remove(key);
		xrefLoadingCollectionEntries.remove( key );
	}

	@SuppressWarnings( {"UnusedDeclaration"})
	Map getLoadingCollectionXRefs() {
		return xrefLoadingCollectionEntries;
	}


	/**

@ -271,7 +264,7 @@ public class LoadContexts {
			return null;
		}
		LOG.tracev( "Attempting to locate loading collection entry [{0}] in any result-set context", key );
		LoadingCollectionEntry rtn = xrefLoadingCollectionEntries.get( key );
		final LoadingCollectionEntry rtn = xrefLoadingCollectionEntries.get( key );
		if ( rtn == null ) {
			LOG.tracev( "Collection [{0}] not located in load context", key );
		}

@ -291,6 +284,13 @@ public class LoadContexts {
	// Entity load contexts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// 		* currently, not yet used...

	/**
	 * Currently unused
	 *
	 * @param resultSet The result set
	 *
	 * @return The entity load context
	 */
	@SuppressWarnings( {"UnusedDeclaration"})
	public EntityLoadContext getEntityLoadContext(ResultSet resultSet) {
		EntityLoadContext context = null;

@ -41,7 +41,7 @@ public class LoadingCollectionEntry {
	private final Serializable key;
	private final PersistentCollection collection;

	public LoadingCollectionEntry(
	LoadingCollectionEntry(
			ResultSet resultSet,
			CollectionPersister persister,
			Serializable key,

@ -68,6 +68,7 @@ public class LoadingCollectionEntry {
		return collection;
	}

	@Override
	public String toString() {
		return getClass().getName() + "<rs=" + resultSet + ", coll=" + MessageHelper.collectionInfoString( persister.getRole(), key ) + ">@" + Integer.toHexString( hashCode() );
	}

@ -21,11 +21,8 @@
 * 51 Franklin Street, Fifth Floor
 * Boston, MA  02110-1301  USA
 */
package org.hibernate.persister.walking.spi;

/**
 * @author Gail Badner
 * Internal classes used to track loading of data, potentially across multiple ResultSets
 */
public interface CompositionElementDefinition extends CompositionDefinition{
package org.hibernate.engine.loading.internal;
	public CollectionDefinition getCollectionDefinition();
}
@@ -34,6 +34,12 @@ public class Association {
private final String associationPath;
private final String role;

+ /**
+ * Constructs a association defining what is to be fetched.
+ *
+ * @param owner The entity owning the association
+ * @param associationPath The path of the association, from the entity
+ */
public Association(EntityPersister owner, String associationPath) {
this.owner = owner;
this.associationPath = associationPath;

@@ -23,7 +23,6 @@
*/
package org.hibernate.engine.profile;


/**
* Models an individual fetch within a profile.
*
@@ -33,6 +32,12 @@ public class Fetch {
private final Association association;
private final Style style;

+ /**
+ * Constructs a Fetch
+ *
+ * @param association The association to be fetched
+ * @param style How to fetch it
+ */
public Fetch(Association association, Style style) {
this.association = association;
this.style = style;
@@ -54,7 +59,13 @@ public class Fetch {
* needed for other things as well anyway).
*/
public enum Style {
+ /**
+ * Fetch via a join
+ */
JOIN( "join" ),
+ /**
+ * Fetch via a subsequent select
+ */
SELECT( "select" );

private final String name;
@@ -63,10 +74,18 @@ public class Fetch {
this.name = name;
}

+ @Override
public String toString() {
return name;
}

+ /**
+ * Parses a style given an externalized string representation
+ *
+ * @param name The externalized representation
+ *
+ * @return The style; {@link #JOIN} is returned if not recognized
+ */
public static Style parse(String name) {
if ( SELECT.name.equals( name ) ) {
return SELECT;
@@ -78,6 +97,7 @@ public class Fetch {
}
}

+ @Override
public String toString() {
return "Fetch[" + style + "{" + association.getRole() + "}]";
}

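The hunk above documents Fetch.Style.parse and its fall-back to JOIN for unrecognized names. A minimal sketch of that behaviour follows; it is illustrative only and not part of this commit, and the class name FetchStyleParseExample is invented for the example (the printed values come from Style.toString(), which returns the externalized name):

    import org.hibernate.engine.profile.Fetch;

    public class FetchStyleParseExample {
        public static void main(String[] args) {
            // "select" resolves to Style.SELECT; anything unrecognized falls back to Style.JOIN
            System.out.println( Fetch.Style.parse( "select" ) );  // prints "select"
            System.out.println( Fetch.Style.parse( "join" ) );    // prints "join"
            System.out.println( Fetch.Style.parse( "bogus" ) );   // prints "join" (fall-back)
        }
    }
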
@@ -22,12 +22,11 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.profile;

import java.util.HashMap;
import java.util.Map;

- import org.jboss.logging.Logger;
+ import org.hibernate.internal.CoreLogging;

- import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.type.BagType;
import org.hibernate.type.Type;
@@ -41,21 +40,17 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class FetchProfile {
+ private static final CoreMessageLogger LOG = CoreLogging.messageLogger( FetchProfile.class );
- private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, FetchProfile.class.getName());

private final String name;
private Map<String,Fetch> fetches = new HashMap<String,Fetch>();

- private boolean containsJoinFetchedCollection = false;
+ private boolean containsJoinFetchedCollection;
- private boolean containsJoinFetchedBag = false;
+ private boolean containsJoinFetchedBag;
private Fetch bagJoinFetch;

/**
- * A 'fetch profile' is uniquely named within a
+ * Constructs a FetchProfile, supplying its unique name (unique within the SessionFactory).
- * {@link SessionFactoryImplementor SessionFactory}, thus it is also
- * uniquely and easily identifiable within that
- * {@link SessionFactoryImplementor SessionFactory}.
*
* @param name The name under which we are bound in the sessionFactory
*/
@@ -91,7 +86,7 @@ public class FetchProfile {
*/
public void addFetch(final Fetch fetch) {
final String fetchAssociactionRole = fetch.getAssociation().getRole();
- Type associationType = fetch.getAssociation().getOwner().getPropertyType( fetch.getAssociation().getAssociationPath() );
+ final Type associationType = fetch.getAssociation().getOwner().getPropertyType( fetch.getAssociation().getAssociationPath() );
if ( associationType.isCollectionType() ) {
LOG.tracev( "Handling request to add collection fetch [{0}]", fetchAssociactionRole );

@@ -103,7 +98,8 @@ public class FetchProfile {
if ( BagType.class.isInstance( associationType ) ) {
if ( containsJoinFetchedCollection ) {
LOG.containsJoinFetchedCollection( fetchAssociactionRole );
- return; // EARLY EXIT!!!
+ // EARLY EXIT!!!
+ return;
}
}

@@ -144,6 +140,13 @@ public class FetchProfile {
return fetches;
}

+ /**
+ * Obtain the fetch associated with the given role.
+ *
+ * @param role The role identifying the fetch
+ *
+ * @return The fetch, or {@code null} if a matching one was not found
+ */
public Fetch getFetchByRole(String role) {
return fetches.get( role );
}

@@ -0,0 +1,4 @@
+ /**
+ * Models the fetch profiles defined by the application
+ */
+ package org.hibernate.engine.profile;

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.engine.query.spi;

import java.io.Serializable;
import java.util.Map;

@@ -38,12 +38,21 @@ public class FilterQueryPlan extends HQLQueryPlan implements Serializable {

private final String collectionRole;

+ /**
+ * Constructs a query plan for an HQL filter
+ *
+ * @param hql The HQL fragment
+ * @param collectionRole The collection role being filtered
+ * @param shallow Is the query shallow?
+ * @param enabledFilters All enabled filters from the Session
+ * @param factory The factory
+ */
public FilterQueryPlan(
String hql,
String collectionRole,
boolean shallow,
Map enabledFilters,
SessionFactoryImplementor factory) {
super( hql, collectionRole, shallow, enabledFilters, factory );
this.collectionRole = collectionRole;
}

@@ -32,8 +32,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;

- import org.jboss.logging.Logger;
+ import org.hibernate.Filter;

import org.hibernate.HibernateException;
import org.hibernate.QueryException;
import org.hibernate.ScrollableResults;
@@ -47,6 +46,7 @@ import org.hibernate.hql.spi.FilterTranslator;
import org.hibernate.hql.spi.ParameterTranslations;
import org.hibernate.hql.spi.QueryTranslator;
import org.hibernate.hql.spi.QueryTranslatorFactory;
+ import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.collections.EmptyIterator;
@@ -60,11 +60,10 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class HQLQueryPlan implements Serializable {
+ private static final CoreMessageLogger LOG = CoreLogging.messageLogger( HQLQueryPlan.class );

// TODO : keep separate notions of QT[] here for shallow/non-shallow queries...

- private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, HQLQueryPlan.class.getName());

private final String sourceQuery;
private final QueryTranslator[] translators;
private final String[] sqlStrings;
@@ -73,17 +72,32 @@ public class HQLQueryPlan implements Serializable {
private final ReturnMetadata returnMetadata;
private final Set querySpaces;

- private final Set enabledFilterNames;
+ private final Set<String> enabledFilterNames;
private final boolean shallow;

- public HQLQueryPlan(String hql, boolean shallow, Map enabledFilters, SessionFactoryImplementor factory) {
+ /**
+ * Constructs a HQLQueryPlan
+ *
+ * @param hql The HQL query
+ * @param shallow Whether the execution is to be shallow or not
+ * @param enabledFilters The enabled filters (we only keep the names)
+ * @param factory The factory
+ */
+ public HQLQueryPlan(String hql, boolean shallow, Map<String,Filter> enabledFilters, SessionFactoryImplementor factory) {
this( hql, null, shallow, enabledFilters, factory );
}

- protected HQLQueryPlan(String hql, String collectionRole, boolean shallow, Map enabledFilters, SessionFactoryImplementor factory){
+ @SuppressWarnings("unchecked")
+ protected HQLQueryPlan(
+ String hql,
+ String collectionRole,
+ boolean shallow,
+ Map<String,Filter> enabledFilters,
+ SessionFactoryImplementor factory) {
this.sourceQuery = hql;
this.shallow = shallow;
- Set copy = new HashSet();
+ final Set<String> copy = new HashSet<String>();
copy.addAll( enabledFilters.keySet() );
this.enabledFilterNames = java.util.Collections.unmodifiableSet( copy );

@@ -91,8 +105,8 @@ public class HQLQueryPlan implements Serializable {
final int length = concreteQueryStrings.length;
this.translators = new QueryTranslator[length];

- List<String> sqlStringList = new ArrayList<String>();
+ final List<String> sqlStringList = new ArrayList<String>();
- Set combinedQuerySpaces = new HashSet();
+ final Set combinedQuerySpaces = new HashSet();

final boolean hasCollectionRole = (collectionRole == null);
final Map querySubstitutions = factory.getSettings().getQuerySubstitutions();
@@ -107,7 +121,7 @@ public class HQLQueryPlan implements Serializable {
else {
translators[i] = queryTranslatorFactory
.createFilterTranslator( hql, concreteQueryStrings[i], enabledFilters, factory );
- ( ( FilterTranslator ) translators[i] ).compile( collectionRole, querySubstitutions, shallow );
+ ( (FilterTranslator) translators[i] ).compile( collectionRole, querySubstitutions, shallow );
}
combinedQuerySpaces.addAll( translators[i].getQuerySpaces() );
sqlStringList.addAll( translators[i].collectSqlStrings() );
@@ -165,20 +179,33 @@ public class HQLQueryPlan implements Serializable {
return shallow;
}

+ /**
+ * Coordinates the efforts to perform a list across all the included query translators.
+ *
+ * @param queryParameters The query parameters
+ * @param session The session
+ *
+ * @return The query result list
+ *
+ * @throws HibernateException Indicates a problem performing the query
+ */
+ @SuppressWarnings("unchecked")
public List performList(
QueryParameters queryParameters,
SessionImplementor session) throws HibernateException {
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "Find: {0}", getSourceQuery() );
queryParameters.traceParameters( session.getFactory() );
}
- boolean hasLimit = queryParameters.getRowSelection() != null &&
- queryParameters.getRowSelection().definesLimits();
+ final boolean hasLimit = queryParameters.getRowSelection() != null
- boolean needsLimit = hasLimit && translators.length > 1;
+ && queryParameters.getRowSelection().definesLimits();
- QueryParameters queryParametersToUse;
+ final boolean needsLimit = hasLimit && translators.length > 1;

+ final QueryParameters queryParametersToUse;
if ( needsLimit ) {
LOG.needsLimit();
- RowSelection selection = new RowSelection();
+ final RowSelection selection = new RowSelection();
selection.setFetchSize( queryParameters.getRowSelection().getFetchSize() );
selection.setTimeout( queryParameters.getRowSelection().getTimeout() );
queryParametersToUse = queryParameters.createCopyUsing( selection );
@@ -187,12 +214,12 @@ public class HQLQueryPlan implements Serializable {
queryParametersToUse = queryParameters;
}

- List combinedResults = new ArrayList();
+ final List combinedResults = new ArrayList();
- IdentitySet distinction = new IdentitySet();
+ final IdentitySet distinction = new IdentitySet();
int includedCount = -1;
translator_loop:
for ( QueryTranslator translator : translators ) {
- List tmp = translator.list( session, queryParametersToUse );
+ final List tmp = translator.list( session, queryParametersToUse );
if ( needsLimit ) {
// NOTE : firstRow is zero-based
final int first = queryParameters.getRowSelection().getFirstRow() == null
@@ -223,9 +250,20 @@ public class HQLQueryPlan implements Serializable {
return combinedResults;
}

+ /**
+ * Coordinates the efforts to perform an iterate across all the included query translators.
+ *
+ * @param queryParameters The query parameters
+ * @param session The session
+ *
+ * @return The query result iterator
+ *
+ * @throws HibernateException Indicates a problem performing the query
+ */
+ @SuppressWarnings("unchecked")
public Iterator performIterate(
QueryParameters queryParameters,
EventSource session) throws HibernateException {
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "Iterate: {0}", getSourceQuery() );
queryParameters.traceParameters( session.getFactory() );
@@ -234,8 +272,8 @@ public class HQLQueryPlan implements Serializable {
return EmptyIterator.INSTANCE;
}

+ final boolean many = translators.length > 1;
Iterator[] results = null;
- boolean many = translators.length > 1;
if ( many ) {
results = new Iterator[translators.length];
}
@@ -248,12 +286,22 @@ public class HQLQueryPlan implements Serializable {
}
}

- return many ? new JoinedIterator(results) : result;
+ return many ? new JoinedIterator( results ) : result;
}

+ /**
+ * Coordinates the efforts to perform a scroll across all the included query translators.
+ *
+ * @param queryParameters The query parameters
+ * @param session The session
+ *
+ * @return The query result iterator
+ *
+ * @throws HibernateException Indicates a problem performing the query
+ */
public ScrollableResults performScroll(
QueryParameters queryParameters,
SessionImplementor session) throws HibernateException {
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "Iterate: {0}", getSourceQuery() );
queryParameters.traceParameters( session.getFactory() );
@@ -268,6 +316,16 @@ public class HQLQueryPlan implements Serializable {
return translators[0].scroll( queryParameters, session );
}

+ /**
+ * Coordinates the efforts to perform an execution across all the included query translators.
+ *
+ * @param queryParameters The query parameters
+ * @param session The session
+ *
+ * @return The aggregated "affected row" count
+ *
+ * @throws HibernateException Indicates a problem performing the execution
+ */
public int performExecuteUpdate(QueryParameters queryParameters, SessionImplementor session)
throws HibernateException {
if ( LOG.isTraceEnabled() ) {
@@ -285,32 +343,34 @@ public class HQLQueryPlan implements Serializable {
}

private ParameterMetadata buildParameterMetadata(ParameterTranslations parameterTranslations, String hql) {
- long start = System.currentTimeMillis();
+ final long start = System.currentTimeMillis();
- ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( hql );
+ final ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( hql );
- long end = System.currentTimeMillis();
+ final long end = System.currentTimeMillis();

if ( LOG.isTraceEnabled() ) {
LOG.tracev( "HQL param location recognition took {0} mills ({1})", ( end - start ), hql );
}

int ordinalParamCount = parameterTranslations.getOrdinalParameterCount();
- int[] locations = ArrayHelper.toIntArray( recognizer.getOrdinalParameterLocationList() );
+ final int[] locations = ArrayHelper.toIntArray( recognizer.getOrdinalParameterLocationList() );
if ( parameterTranslations.supportsOrdinalParameterMetadata() && locations.length != ordinalParamCount ) {
throw new HibernateException( "ordinal parameter mismatch" );
}
ordinalParamCount = locations.length;
- OrdinalParameterDescriptor[] ordinalParamDescriptors = new OrdinalParameterDescriptor[ordinalParamCount];
+ final OrdinalParameterDescriptor[] ordinalParamDescriptors = new OrdinalParameterDescriptor[ordinalParamCount];
for ( int i = 1; i <= ordinalParamCount; i++ ) {
ordinalParamDescriptors[ i - 1 ] = new OrdinalParameterDescriptor(
i,
parameterTranslations.supportsOrdinalParameterMetadata()
? parameterTranslations.getOrdinalParameterExpectedType( i )
: null,
locations[ i - 1 ]
);
}

- Map<String, NamedParameterDescriptor> namedParamDescriptorMap = new HashMap<String, NamedParameterDescriptor>();
+ final Map<String, NamedParameterDescriptor> namedParamDescriptorMap = new HashMap<String, NamedParameterDescriptor>();
- Map<String, ParamLocationRecognizer.NamedParameterDescription> map = recognizer.getNamedParameterDescriptionMap();
+ final Map<String, ParamLocationRecognizer.NamedParameterDescription> map = recognizer.getNamedParameterDescriptionMap();
for ( final String name : map.keySet() ) {
final ParamLocationRecognizer.NamedParameterDescription description = map.get( name );
namedParamDescriptorMap.put(
@@ -325,9 +385,15 @@ public class HQLQueryPlan implements Serializable {
}
return new ParameterMetadata( ordinalParamDescriptors, namedParamDescriptorMap );
}

+ /**
+ * Access to the underlying translators associated with this query
+ *
+ * @return The translators
+ */
public QueryTranslator[] getTranslators() {
- QueryTranslator[] copy = new QueryTranslator[translators.length];
+ final QueryTranslator[] copy = new QueryTranslator[translators.length];
- System.arraycopy(translators, 0, copy, 0, copy.length);
+ System.arraycopy( translators, 0, copy, 0, copy.length );
return copy;
}

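Several files in this commit (FetchProfile, HQLQueryPlan, NativeSQLQueryPlan) replace the verbose Logger.getMessageLogger(...) declaration with CoreLogging.messageLogger(...). A minimal sketch of the resulting pattern; it is illustrative only and not part of the commit, and SomeInternalComponent is a hypothetical class name:

    import org.hibernate.internal.CoreLogging;
    import org.hibernate.internal.CoreMessageLogger;

    public class SomeInternalComponent {
        // one-line logger declaration this commit converges on
        private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SomeInternalComponent.class );

        void doWork() {
            LOG.tracev( "Doing work in {0}", SomeInternalComponent.class.getSimpleName() );
        }
    }
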
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.engine.query.spi;

import java.io.Serializable;

import org.hibernate.type.Type;
@@ -38,6 +38,14 @@ public class NamedParameterDescriptor implements Serializable {
private final int[] sourceLocations;
private final boolean jpaStyle;

+ /**
+ * Constructs a NamedParameterDescriptor
+ *
+ * @param name The name of the parameter
+ * @param expectedType The expected type of the parameter, according to the translator
+ * @param sourceLocations The locations of the named parameters (aye aye aye)
+ * @param jpaStyle Was the parameter a JPA style "named parameter"?
+ */
public NamedParameterDescriptor(String name, Type expectedType, int[] sourceLocations, boolean jpaStyle) {
this.name = name;
this.expectedType = expectedType;
@@ -61,6 +69,11 @@ public class NamedParameterDescriptor implements Serializable {
return jpaStyle;
}

+ /**
+ * Set the parameters expected type
+ *
+ * @param type The new expected type
+ */
public void resetExpectedType(Type type) {
this.expectedType = type;
}

@@ -30,8 +30,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;

- import org.jboss.logging.Logger;

import org.hibernate.HibernateException;
import org.hibernate.QueryException;
import org.hibernate.action.internal.BulkOperationCleanupAction;
@@ -41,6 +39,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.TypedValue;
import org.hibernate.event.spi.EventSource;
+ import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.loader.custom.sql.SQLCustomQuery;
@@ -52,10 +51,7 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class NativeSQLQueryPlan implements Serializable {
- private static final CoreMessageLogger LOG = Logger.getMessageLogger(
+ private static final CoreMessageLogger LOG = CoreLogging.messageLogger( NativeSQLQueryPlan.class );
- CoreMessageLogger.class,
- NativeSQLQueryPlan.class.getName()
- );

private final String sourceQuery;
private final SQLCustomQuery customQuery;
@@ -87,7 +83,7 @@ public class NativeSQLQueryPlan implements Serializable {
}

private int[] getNamedParameterLocs(String name) throws QueryException {
- Object loc = customQuery.getNamedParameterBindPoints().get( name );
+ final Object loc = customQuery.getNamedParameterBindPoints().get( name );
if ( loc == null ) {
throw new QueryException(
"Named parameter does not appear in Query: " + name,
@@ -154,60 +150,73 @@ public class NativeSQLQueryPlan implements Serializable {
final SessionImplementor session) throws SQLException {
if ( namedParams != null ) {
// assumes that types are all of span 1
- Iterator iter = namedParams.entrySet().iterator();
+ final Iterator iter = namedParams.entrySet().iterator();
int result = 0;
while ( iter.hasNext() ) {
- Map.Entry e = (Map.Entry) iter.next();
+ final Map.Entry e = (Map.Entry) iter.next();
- String name = (String) e.getKey();
+ final String name = (String) e.getKey();
- TypedValue typedval = (TypedValue) e.getValue();
+ final TypedValue typedval = (TypedValue) e.getValue();
- int[] locs = getNamedParameterLocs( name );
+ final int[] locs = getNamedParameterLocs( name );
- for (int i = 0; i < locs.length; i++) {
+ for ( int loc : locs ) {
- LOG.debugf("bindNamedParameters() %s -> %s [%s]", typedval.getValue(), name, locs[i] + start);
+ LOG.debugf( "bindNamedParameters() %s -> %s [%s]", typedval.getValue(), name, loc + start );
- typedval.getType().nullSafeSet( ps, typedval.getValue(),
+ typedval.getType().nullSafeSet(
- locs[i] + start, session );
+ ps,
+ typedval.getValue(),
+ loc + start,
+ session
+ );
}
result += locs.length;
}
return result;
}
- return 0;
+ return 0;
}

protected void coordinateSharedCacheCleanup(SessionImplementor session) {
- BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getCustomQuery().getQuerySpaces() );
+ final BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getCustomQuery().getQuerySpaces() );

if ( session.isEventSource() ) {
- ( ( EventSource ) session ).getActionQueue().addAction( action );
+ ( (EventSource) session ).getActionQueue().addAction( action );
}
else {
action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
}
}

- public int performExecuteUpdate(QueryParameters queryParameters,
+ /**
+ * Performs the execute query
+ *
+ * @param queryParameters The query parameters
+ * @param session The session
+ *
+ * @return The number of affected rows as returned by the JDBC driver
+ *
+ * @throws HibernateException Indicates a problem performing the query execution
+ */
+ public int performExecuteUpdate(
+ QueryParameters queryParameters,
SessionImplementor session) throws HibernateException {

coordinateSharedCacheCleanup( session );

- if(queryParameters.isCallable()) {
+ if ( queryParameters.isCallable() ) {
throw new IllegalArgumentException("callable not yet supported for native queries");
}

int result = 0;
PreparedStatement ps;
try {
- queryParameters.processFilters( this.customQuery.getSQL(),
+ queryParameters.processFilters( this.customQuery.getSQL(), session );
- session );
+ final String sql = queryParameters.getFilteredSQL();
- String sql = queryParameters.getFilteredSQL();

ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false );

try {
int col = 1;
- col += bindPositionalParameters( ps, queryParameters, col,
+ col += bindPositionalParameters( ps, queryParameters, col, session );
- session );
+ col += bindNamedParameters( ps, queryParameters.getNamedParameters(), col, session );
- col += bindNamedParameters( ps, queryParameters
- .getNamedParameters(), col, session );
result = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().executeUpdate( ps );
}
finally {
@@ -218,7 +227,10 @@ public class NativeSQLQueryPlan implements Serializable {
}
catch (SQLException sqle) {
throw session.getFactory().getSQLExceptionHelper().convert(
- sqle, "could not execute native bulk manipulation query", this.sourceQuery );
+ sqle,
+ "could not execute native bulk manipulation query",
+ this.sourceQuery
+ );
}

return result;

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,14 +20,16 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.engine.query.spi;

import java.io.Serializable;

import org.hibernate.type.Type;

/**
+ * Descriptor regarding an ordinal parameter.
+ *
* @author Steve Ebersole
*/
public class OrdinalParameterDescriptor implements Serializable {
@@ -35,6 +37,13 @@ public class OrdinalParameterDescriptor implements Serializable {
private final Type expectedType;
private final int sourceLocation;

+ /**
+ * Constructs an ordinal parameter descriptor.
+ *
+ * @param ordinalPosition The ordinal position
+ * @param expectedType The expected type of the parameter
+ * @param sourceLocation The location of the parameter
+ */
public OrdinalParameterDescriptor(int ordinalPosition, Type expectedType, int sourceLocation) {
this.ordinalPosition = ordinalPosition;
this.expectedType = expectedType;

@@ -38,12 +38,14 @@ import org.hibernate.internal.util.collections.ArrayHelper;
* @author Steve Ebersole
*/
public class ParamLocationRecognizer implements ParameterParser.Recognizer {
+ /**
+ * Internal representation of a recognized named parameter
+ */
public static class NamedParameterDescription {
private final boolean jpaStyle;
private final List<Integer> positions = new ArrayList<Integer>();

- public NamedParameterDescription(boolean jpaStyle) {
+ NamedParameterDescription(boolean jpaStyle) {
this.jpaStyle = jpaStyle;
}

@@ -71,7 +73,7 @@ public class ParamLocationRecognizer implements ParameterParser.Recognizer {
* @return The generated recognizer, with journaled location info.
*/
public static ParamLocationRecognizer parseLocations(String query) {
- ParamLocationRecognizer recognizer = new ParamLocationRecognizer();
+ final ParamLocationRecognizer recognizer = new ParamLocationRecognizer();
ParameterParser.parse( query, recognizer );
return recognizer;
}
@@ -88,8 +90,8 @@ public class ParamLocationRecognizer implements ParameterParser.Recognizer {

/**
* Returns the list of ordinal parameter locations. The list elements
- * are Integers, representing the location for that given ordinal. Thus
+ * are Integers, representing the location for that given ordinal. Thus calling
- * {@link #getOrdinalParameterLocationList()}.elementAt(n) represents the
+ * {@code getOrdinalParameterLocationList().elementAt(n)} represents the
* location for the nth parameter.
*
* @return The list of ordinal parameter locations.

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.engine.query.spi;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
@@ -37,33 +37,35 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class ParameterMetadata implements Serializable {

private static final OrdinalParameterDescriptor[] EMPTY_ORDINALS = new OrdinalParameterDescriptor[0];

private final OrdinalParameterDescriptor[] ordinalDescriptors;
- private final Map namedDescriptorMap;
+ private final Map<String,NamedParameterDescriptor> namedDescriptorMap;

/**
* Instantiates a ParameterMetadata container.
*
- * @param ordinalDescriptors
+ * @param ordinalDescriptors Descriptors of the ordinal parameters
- * @param namedDescriptorMap
+ * @param namedDescriptorMap Descriptors of the named parameters
*/
- public ParameterMetadata(OrdinalParameterDescriptor[] ordinalDescriptors, Map namedDescriptorMap) {
+ public ParameterMetadata(
+ OrdinalParameterDescriptor[] ordinalDescriptors,
+ Map<String,NamedParameterDescriptor> namedDescriptorMap) {
if ( ordinalDescriptors == null ) {
this.ordinalDescriptors = EMPTY_ORDINALS;
}
else {
- OrdinalParameterDescriptor[] copy = new OrdinalParameterDescriptor[ ordinalDescriptors.length ];
+ final OrdinalParameterDescriptor[] copy = new OrdinalParameterDescriptor[ ordinalDescriptors.length ];
System.arraycopy( ordinalDescriptors, 0, copy, 0, ordinalDescriptors.length );
this.ordinalDescriptors = copy;
}

if ( namedDescriptorMap == null ) {
- this.namedDescriptorMap = java.util.Collections.EMPTY_MAP;
+ this.namedDescriptorMap = java.util.Collections.emptyMap();
}
else {
- int size = ( int ) ( ( namedDescriptorMap.size() / .75 ) + 1 );
+ final int size = (int) ( ( namedDescriptorMap.size() / .75 ) + 1 );
- Map copy = new HashMap( size );
+ final Map<String,NamedParameterDescriptor> copy = new HashMap<String,NamedParameterDescriptor>( size );
copy.putAll( namedDescriptorMap );
this.namedDescriptorMap = java.util.Collections.unmodifiableMap( copy );
}
@@ -73,39 +75,107 @@ public class ParameterMetadata implements Serializable {
return ordinalDescriptors.length;
}

+ /**
+ * Get the descriptor for an ordinal parameter given its position
+ *
+ * @param position The position (1 based)
+ *
+ * @return The ordinal parameter descriptor
+ *
+ * @throws QueryParameterException If the position is out of range
+ */
public OrdinalParameterDescriptor getOrdinalParameterDescriptor(int position) {
if ( position < 1 || position > ordinalDescriptors.length ) {
- String error = "Position beyond number of declared ordinal parameters. " +
+ throw new QueryParameterException(
- "Remember that ordinal parameters are 1-based! Position: " + position;
+ "Position beyond number of declared ordinal parameters. " +
- throw new QueryParameterException( error );
+ "Remember that ordinal parameters are 1-based! Position: " + position
+ );
}
return ordinalDescriptors[position - 1];
}

+ /**
+ * Deprecated.
+ *
+ * @param position The position
+ *
+ * @return The type
+ *
+ * @deprecated Use {@link OrdinalParameterDescriptor#getExpectedType()} from the
+ * {@link #getOrdinalParameterDescriptor} return instead
+ */
+ @Deprecated
public Type getOrdinalParameterExpectedType(int position) {
return getOrdinalParameterDescriptor( position ).getExpectedType();
}

+ /**
+ * Deprecated.
+ *
+ * @param position The position
+ *
+ * @return The source location
+ *
+ * @deprecated Use {@link OrdinalParameterDescriptor#getSourceLocation()} from the
+ * {@link #getOrdinalParameterDescriptor} return instead
+ */
+ @Deprecated
public int getOrdinalParameterSourceLocation(int position) {
return getOrdinalParameterDescriptor( position ).getSourceLocation();
}

+ /**
+ * Access to the names of all named parameters
+ *
+ * @return The named parameter names
+ */
public Set getNamedParameterNames() {
return namedDescriptorMap.keySet();
}

+ /**
+ * Get the descriptor for a named parameter given the name
+ *
+ * @param name The name of the parameter to locate
+ *
+ * @return The named parameter descriptor
+ *
+ * @throws QueryParameterException If the name could not be resolved to a named parameter
+ */
public NamedParameterDescriptor getNamedParameterDescriptor(String name) {
- NamedParameterDescriptor meta = ( NamedParameterDescriptor ) namedDescriptorMap.get( name );
+ final NamedParameterDescriptor meta = namedDescriptorMap.get( name );
if ( meta == null ) {
throw new QueryParameterException( "could not locate named parameter [" + name + "]" );
}
return meta;
}

+ /**
+ * Deprecated.
+ *
+ * @param name The name of the parameter
+ *
+ * @return The type
+ *
+ * @deprecated Use {@link NamedParameterDescriptor#getExpectedType()} from the
+ * {@link #getNamedParameterDescriptor} return instead
+ */
+ @Deprecated
public Type getNamedParameterExpectedType(String name) {
return getNamedParameterDescriptor( name ).getExpectedType();
}

+ /**
+ * Deprecated.
+ *
+ * @param name The name of the parameter
+ *
+ * @return The type
+ *
+ * @deprecated Use {@link NamedParameterDescriptor#getSourceLocations()} from the
+ * {@link #getNamedParameterDescriptor} return instead
+ */
+ @Deprecated
public int[] getNamedParameterSourceLocations(String name) {
return getNamedParameterDescriptor( name ).getSourceLocations();
}

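The hunk above deprecates the convenience accessors on ParameterMetadata in favour of the descriptor objects referenced in the new @deprecated tags. A migration sketch follows; it is illustrative only and not part of the commit, and the class name ParameterMetadataMigration, the passed-in metadata instance, and the parameter name "id" are assumptions for the example:

    import org.hibernate.engine.query.spi.NamedParameterDescriptor;
    import org.hibernate.engine.query.spi.OrdinalParameterDescriptor;
    import org.hibernate.engine.query.spi.ParameterMetadata;
    import org.hibernate.type.Type;

    class ParameterMetadataMigration {
        static void example(ParameterMetadata metadata) {
            // deprecated by this commit
            Type viaShortcut = metadata.getOrdinalParameterExpectedType( 1 );

            // preferred replacement: go through the descriptor
            OrdinalParameterDescriptor ordinal = metadata.getOrdinalParameterDescriptor( 1 );
            Type viaDescriptor = ordinal.getExpectedType();

            // same idea for named parameters (throws QueryParameterException if "id" is unknown)
            NamedParameterDescriptor named = metadata.getNamedParameterDescriptor( "id" );
            int[] locations = named.getSourceLocations();
        }
    }
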
@@ -1,10 +1,10 @@
 /*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
-* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors. All third-party contributions are
-* distributed under license by Red Hat Middleware LLC.
+* distributed under license by Red Hat Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA 02110-1301 USA
-*
 */
 package org.hibernate.engine.query.spi;

 import org.hibernate.QueryException;
 import org.hibernate.hql.internal.classic.ParserHelper;
 import org.hibernate.internal.util.StringHelper;
@@ -36,12 +36,45 @@ import org.hibernate.internal.util.StringHelper;
 * @author Steve Ebersole
 */
 public class ParameterParser {
+/**
+* Maybe better named a Journaler. Essentially provides a callback contract for things that recognize parameters
+*/
 public static interface Recognizer {
+/**
+* Called when an output parameter is recognized
+*
+* @param position The position within the query
+*/
 public void outParameter(int position);

+/**
+* Called when an ordinal parameter is recognized
+*
+* @param position The position within the query
+*/
 public void ordinalParameter(int position);

+/**
+* Called when a named parameter is recognized
+*
+* @param name The recognized parameter name
+* @param position The position within the query
+*/
 public void namedParameter(String name, int position);

+/**
+* Called when a JPA-style named parameter is recognized
+*
+* @param name The name of the JPA-style parameter
+* @param position The position within the query
+*/
 public void jpaPositionalParameter(String name, int position);

+/**
+* Called when a character that is not a parameter (or part of a parameter dfinition) is recognized.
+*
+* @param character The recognized character
+*/
 public void other(char character);
 }

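The new Javadoc above spells out the Recognizer callback contract. As a quick illustration, a throwaway Recognizer that simply records what the parser reports could look like the following; only ParameterParser.parse and the Recognizer methods come from the code shown in this diff, the query string and everything else is illustrative:

    import java.util.ArrayList;
    import java.util.List;

    import org.hibernate.engine.query.spi.ParameterParser;

    public class RecognizerSketch {
        public static void main(String[] args) {
            final List<String> events = new ArrayList<String>();

            ParameterParser.parse(
                    "from Item i where i.owner = :owner and i.id = ?1",
                    new ParameterParser.Recognizer() {
                        public void outParameter(int position) {
                            events.add( "out @" + position );
                        }
                        public void ordinalParameter(int position) {
                            events.add( "ordinal @" + position );
                        }
                        public void namedParameter(String name, int position) {
                            events.add( "named '" + name + "' @" + position );
                        }
                        public void jpaPositionalParameter(String name, int position) {
                            events.add( "jpa-positional '" + name + "' @" + position );
                        }
                        public void other(char character) {
                            // non-parameter characters are simply passed through
                        }
                    }
            );

            // expected to contain one "named 'owner'" event and one "jpa-positional '1'" event
            System.out.println( events );
        }
    }
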
@@ -64,13 +97,13 @@ public class ParameterParser {
 * @throws QueryException Indicates unexpected parameter conditions.
 */
 public static void parse(String sqlString, Recognizer recognizer) throws QueryException {
-boolean hasMainOutputParameter = startsWithEscapeCallTemplate( sqlString );
+final boolean hasMainOutputParameter = startsWithEscapeCallTemplate( sqlString );
 boolean foundMainOutputParam = false;

-int stringLength = sqlString.length();
+final int stringLength = sqlString.length();
 boolean inQuote = false;
 for ( int indx = 0; indx < stringLength; indx++ ) {
-char c = sqlString.charAt( indx );
+final char c = sqlString.charAt( indx );
 if ( inQuote ) {
 if ( '\'' == c ) {
 inQuote = false;
@@ -88,9 +121,9 @@ public class ParameterParser {
 else {
 if ( c == ':' ) {
 // named parameter
-int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS_BITSET, indx + 1 );
+final int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS_BITSET, indx + 1 );
-int chopLocation = right < 0 ? sqlString.length() : right;
+final int chopLocation = right < 0 ? sqlString.length() : right;
-String param = sqlString.substring( indx + 1, chopLocation );
+final String param = sqlString.substring( indx + 1, chopLocation );
 if ( StringHelper.isEmpty( param ) ) {
 throw new QueryException(
 "Space is not allowed after parameter prefix ':' [" + sqlString + "]"
@@ -103,12 +136,12 @@ public class ParameterParser {
 // could be either an ordinal or JPA-positional parameter
 if ( indx < stringLength - 1 && Character.isDigit( sqlString.charAt( indx + 1 ) ) ) {
 // a peek ahead showed this as an JPA-positional parameter
-int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS, indx + 1 );
+final int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS, indx + 1 );
-int chopLocation = right < 0 ? sqlString.length() : right;
+final int chopLocation = right < 0 ? sqlString.length() : right;
-String param = sqlString.substring( indx + 1, chopLocation );
+final String param = sqlString.substring( indx + 1, chopLocation );
 // make sure this "name" is an integral
 try {
 Integer.valueOf( param );
 }
 catch( NumberFormatException e ) {
 throw new QueryException( "JPA-style positional param was not an integral ordinal" );
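The hunk above only adds final modifiers, but it is the heart of the classification logic: a ':' starts a named parameter chopped at the next HQL separator, and a '?' is a JPA-style positional parameter when a digit follows (and the "name" must be integral), otherwise a plain ordinal parameter. A simplified standalone sketch of just the '?' rule, paralleling the loop above rather than reproducing it:

    // Simplified mirror of the '?' classification rule; not the Hibernate class itself.
    public class PositionalParamRuleSketch {
        static String classify(String sql, int indx) {
            if ( sql.charAt( indx ) != '?' ) {
                return "not a parameter";
            }
            if ( indx < sql.length() - 1 && Character.isDigit( sql.charAt( indx + 1 ) ) ) {
                int right = indx + 1;
                while ( right < sql.length() && Character.isDigit( sql.charAt( right ) ) ) {
                    right++;
                }
                Integer.valueOf( sql.substring( indx + 1, right ) ); // must be integral, else an error is raised
                return "jpa-positional " + sql.substring( indx + 1, right );
            }
            return "ordinal";
        }

        public static void main(String[] args) {
            System.out.println( classify( "select ... where id = ?1", 22 ) ); // jpa-positional 1
            System.out.println( classify( "select ... where id = ?", 22 ) );  // ordinal
        }
    }
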
@@ -133,12 +166,19 @@ public class ParameterParser {
 }
 }

+/**
+* Exposed as public solely for use from tests
+*
+* @param sqlString The SQL string to check
+*
+* @return true/false
+*/
 public static boolean startsWithEscapeCallTemplate(String sqlString) {
 if ( ! ( sqlString.startsWith( "{" ) && sqlString.endsWith( "}" ) ) ) {
 return false;
 }

-int chopLocation = sqlString.indexOf( "call" );
+final int chopLocation = sqlString.indexOf( "call" );
 if ( chopLocation <= 0 ) {
 return false;
 }
@@ -147,7 +187,8 @@ public class ParameterParser {
 final String fixture = "?=call";
 int fixturePosition = 0;
 boolean matches = true;
-for ( int i = 0, max = checkString.length(); i < max; i++ ) {
+final int max = checkString.length();
+for ( int i = 0; i < max; i++ ) {
 final char c = Character.toLowerCase( checkString.charAt( i ) );
 if ( Character.isWhitespace( c ) ) {
 continue;
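startsWithEscapeCallTemplate decides whether a statement uses the JDBC escape syntax for a callable statement with a return parameter: the string must be wrapped in braces, contain "call" after position 0, and the text before "call" must collapse to "?=" ignoring whitespace and case. A small illustration with hypothetical procedure names; the expected results follow from the checks visible in this hunk:

    import org.hibernate.engine.query.spi.ParameterParser;

    public class EscapeCallTemplateSketch {
        public static void main(String[] args) {
            System.out.println( ParameterParser.startsWithEscapeCallTemplate( "{ ? = call my_proc(?) }" ) ); // true: main output parameter
            System.out.println( ParameterParser.startsWithEscapeCallTemplate( "{call my_proc(?)}" ) );       // false: no "?=" before call
            System.out.println( ParameterParser.startsWithEscapeCallTemplate( "select 1 from dual" ) );      // false: not wrapped in { }
        }
    }
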
@@ -1,95 +0,0 @@
-/*
-* Hibernate, Relational Persistence for Idiomatic Java
-*
-* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
-* indicated by the @author tags or express copyright attribution
-* statements applied by the authors. All third-party contributions are
-* distributed under license by Red Hat Middleware LLC.
-*
-* This copyrighted material is made available to anyone wishing to use, modify,
-* copy, or redistribute it subject to the terms and conditions of the GNU
-* Lesser General Public License, as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
-* for more details.
-*
-* You should have received a copy of the GNU Lesser General Public License
-* along with this distribution; if not, write to:
-* Free Software Foundation, Inc.
-* 51 Franklin Street, Fifth Floor
-* Boston, MA 02110-1301 USA
-*
-*/
-package org.hibernate.engine.query.spi;
-import java.io.Serializable;
-import java.util.Set;
-
-import org.hibernate.type.Type;
-
-/**
-* Defines metadata regarding a translated HQL or native-SQL query.
-*
-* @author Steve Ebersole
-*/
-public class QueryMetadata implements Serializable {
-private final String sourceQuery;
-private final ParameterMetadata parameterMetadata;
-private final String[] returnAliases;
-private final Type[] returnTypes;
-private final Set querySpaces;
-
-public QueryMetadata(
-String sourceQuery,
-ParameterMetadata parameterMetadata,
-String[] returnAliases,
-Type[] returnTypes,
-Set querySpaces) {
-this.sourceQuery = sourceQuery;
-this.parameterMetadata = parameterMetadata;
-this.returnAliases = returnAliases;
-this.returnTypes = returnTypes;
-this.querySpaces = querySpaces;
-}
-
-/**
-* Get the source HQL or native-SQL query.
-*
-* @return The source query.
-*/
-public String getSourceQuery() {
-return sourceQuery;
-}
-
-public ParameterMetadata getParameterMetadata() {
-return parameterMetadata;
-}
-
-/**
-* Return source query select clause aliases (if any)
-*
-* @return an array of aliases as strings.
-*/
-public String[] getReturnAliases() {
-return returnAliases;
-}
-
-/**
-* An array of types describing the returns of the source query.
-*
-* @return The return type array.
-*/
-public Type[] getReturnTypes() {
-return returnTypes;
-}
-
-/**
-* The set of query spaces affected by this source query.
-*
-* @return The set of query spaces.
-*/
-public Set getQuerySpaces() {
-return querySpaces;
-}
-}
@@ -28,18 +28,17 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;

 import org.jboss.logging.Logger;

 import org.hibernate.Filter;
 import org.hibernate.MappingException;
 import org.hibernate.QueryException;
 import org.hibernate.cfg.Environment;
 import org.hibernate.engine.query.spi.sql.NativeSQLQuerySpecification;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.internal.CoreLogging;
 import org.hibernate.internal.CoreMessageLogger;
 import org.hibernate.internal.FilterImpl;
 import org.hibernate.internal.util.collections.BoundedConcurrentHashMap;
@@ -55,8 +54,7 @@ import org.hibernate.internal.util.config.ConfigurationHelper;
 * @author Steve Ebersole
 */
 public class QueryPlanCache implements Serializable {
-private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, QueryPlanCache.class.getName());
+private static final CoreMessageLogger LOG = CoreLogging.messageLogger( QueryPlanCache.class );

 /**
 * The default strong reference count.
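The only functional change in the hunk above is how the message logger is obtained: the direct Logger.getMessageLogger call is replaced by the new CoreLogging helper. A sketch of the before/after idiom for an arbitrary class; the class name is a placeholder, the two logger calls are exactly the ones shown in the diff:

    import org.hibernate.internal.CoreLogging;
    import org.hibernate.internal.CoreMessageLogger;
    import org.jboss.logging.Logger;

    public class SomeHibernateComponent {
        // Old idiom, as removed above:
        private static final CoreMessageLogger OLD_LOG =
                Logger.getMessageLogger( CoreMessageLogger.class, SomeHibernateComponent.class.getName() );

        // New idiom, as added above:
        private static final CoreMessageLogger LOG =
                CoreLogging.messageLogger( SomeHibernateComponent.class );
    }
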
@@ -66,11 +64,14 @@ public class QueryPlanCache implements Serializable {
 * The default soft reference count.
 */
 public static final int DEFAULT_QUERY_PLAN_MAX_COUNT = 2048;

 private final SessionFactoryImplementor factory;

 /**
 * the cache of the actual plans...
 */
 private final BoundedConcurrentHashMap queryPlanCache;

 /**
 * simple cache of param metadata based on query string. Ideally, the original "user-supplied query"
 * string should be used to obtain this metadata (i.e., not the para-list-expanded query string) to avoid
@@ -81,6 +82,12 @@ public class QueryPlanCache implements Serializable {
 */
 private final BoundedConcurrentHashMap<String,ParameterMetadata> parameterMetadataCache;

+/**
+* Constructs the QueryPlanCache to be used by the given SessionFactory
+*
+* @param factory The SessionFactory
+*/
+@SuppressWarnings("deprecation")
 public QueryPlanCache(final SessionFactoryImplementor factory) {
 this.factory = factory;

@@ -136,15 +143,17 @@ public class QueryPlanCache implements Serializable {
 }

 private ParameterMetadata buildParameterMetadata(String query){
-ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( query );
+final ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( query );

 final int size = recognizer.getOrdinalParameterLocationList().size();
-OrdinalParameterDescriptor[] ordinalDescriptors = new OrdinalParameterDescriptor[ size ];
+final OrdinalParameterDescriptor[] ordinalDescriptors = new OrdinalParameterDescriptor[ size ];
 for ( int i = 0; i < size; i++ ) {
 final Integer position = recognizer.getOrdinalParameterLocationList().get( i );
 ordinalDescriptors[i] = new OrdinalParameterDescriptor( i, null, position );
 }
-Map<String, NamedParameterDescriptor> namedParamDescriptorMap = new HashMap<String, NamedParameterDescriptor>();
-Map<String, ParamLocationRecognizer.NamedParameterDescription> map = recognizer.getNamedParameterDescriptionMap();
+final Map<String, NamedParameterDescriptor> namedParamDescriptorMap = new HashMap<String, NamedParameterDescriptor>();
+final Map<String, ParamLocationRecognizer.NamedParameterDescription> map = recognizer.getNamedParameterDescriptionMap();
 for ( final String name : map.keySet() ) {
 final ParamLocationRecognizer.NamedParameterDescription description = map.get( name );
 namedParamDescriptorMap.put(
@@ -160,9 +169,22 @@ public class QueryPlanCache implements Serializable {
 return new ParameterMetadata( ordinalDescriptors, namedParamDescriptorMap );
 }

-public HQLQueryPlan getHQLQueryPlan( String queryString, boolean shallow, Map<String, Filter> enabledFilters)
+/**
+* Get the query plan for the given HQL query, creating it and caching it if not already cached
+*
+* @param queryString The HQL query string
+* @param shallow Whether the execution will be shallow
+* @param enabledFilters The filters enabled on the Session
+*
+* @return The query plan
+*
+* @throws QueryException Indicates a problem translating the query
+* @throws MappingException Indicates a problem translating the query
+*/
+@SuppressWarnings("unchecked")
+public HQLQueryPlan getHQLQueryPlan(String queryString, boolean shallow, Map<String,Filter> enabledFilters)
 throws QueryException, MappingException {
-HQLQueryPlanKey key = new HQLQueryPlanKey( queryString, shallow, enabledFilters );
+final HQLQueryPlanKey key = new HQLQueryPlanKey( queryString, shallow, enabledFilters );
 HQLQueryPlan value = (HQLQueryPlan) queryPlanCache.get( key );
 if ( value == null ) {
 LOG.tracev( "Unable to locate HQL query plan in cache; generating ({0})", queryString );
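The lookup above follows a get-then-putIfAbsent pattern: build the plan outside any lock, then let the concurrent map decide which instance wins. A minimal standalone sketch of the same pattern with a plain ConcurrentHashMap; names and the value type are illustrative, and Hibernate itself uses its own BoundedConcurrentHashMap:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class GetOrCreateCacheSketch {
        private final ConcurrentMap<String, String> cache = new ConcurrentHashMap<String, String>();

        public String getPlan(String queryString) {
            String value = cache.get( queryString );
            if ( value == null ) {
                value = compilePlan( queryString );      // potentially expensive, done without locking
                cache.putIfAbsent( queryString, value ); // a concurrent thread's copy may win; both are equivalent
            }
            return value;
        }

        private String compilePlan(String queryString) {
            return "plan for: " + queryString;
        }
    }
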
@@ -174,37 +196,69 @@ public class QueryPlanCache implements Serializable {
 return value;
 }

-public FilterQueryPlan getFilterQueryPlan(String filterString, String collectionRole, boolean shallow, Map enabledFilters)
-throws QueryException, MappingException {
-FilterQueryPlanKey key = new FilterQueryPlanKey( filterString, collectionRole, shallow, enabledFilters );
+/**
+* Get the query plan for the given collection HQL filter fragment, creating it and caching it if not already cached
+*
+* @param filterString The HQL filter fragment
+* @param collectionRole The collection being filtered
+* @param shallow Whether the execution will be shallow
+* @param enabledFilters The filters enabled on the Session
+*
+* @return The query plan
+*
+* @throws QueryException Indicates a problem translating the query
+* @throws MappingException Indicates a problem translating the query
+*/
+@SuppressWarnings("unchecked")
+public FilterQueryPlan getFilterQueryPlan(
+String filterString,
+String collectionRole,
+boolean shallow,
+Map<String,Filter> enabledFilters) throws QueryException, MappingException {
+final FilterQueryPlanKey key = new FilterQueryPlanKey( filterString, collectionRole, shallow, enabledFilters );
 FilterQueryPlan value = (FilterQueryPlan) queryPlanCache.get( key );
-if(value == null){
+if ( value == null ) {
-LOG.tracev( "Unable to locate collection-filter query plan in cache; generating ({0} : {1} )",
-collectionRole, filterString );
+LOG.tracev(
+"Unable to locate collection-filter query plan in cache; generating ({0} : {1} )",
+collectionRole,
+filterString
+);
 value = new FilterQueryPlan( filterString, collectionRole, shallow, enabledFilters,factory );
 queryPlanCache.putIfAbsent( key, value );
-} else {
+}
+else {
 LOG.tracev( "Located collection-filter query plan in cache ({0} : {1})", collectionRole, filterString );
 }
 return value;
 }

+/**
+* Get the query plan for a native SQL query, creating it and caching it if not already cached
+*
+* @param spec The native SQL query specification
+*
+* @return The query plan
+*
+* @throws QueryException Indicates a problem translating the query
+* @throws MappingException Indicates a problem translating the query
+*/
+@SuppressWarnings("unchecked")
 public NativeSQLQueryPlan getNativeSQLQueryPlan(final NativeSQLQuerySpecification spec) {
 NativeSQLQueryPlan value = (NativeSQLQueryPlan) queryPlanCache.get( spec );
-if(value == null){
+if ( value == null ) {
 LOG.tracev( "Unable to locate native-sql query plan in cache; generating ({0})", spec.getQueryString() );
 value = new NativeSQLQueryPlan( spec, factory);
 queryPlanCache.putIfAbsent( spec, value );
-} else {
+}
+else {
 LOG.tracev( "Located native-sql query plan in cache ({0})", spec.getQueryString() );
 }
 return value;
 }

-//clean up QueryPlanCache when Sessionfactory is closed
+/**
+* clean up QueryPlanCache when SessionFactory is closed
+*/
 public void cleanup() {
 LOG.trace( "Cleaning QueryPlan Cache" );
 queryPlanCache.clear();
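For orientation, this is how the cache is reached from the rest of the engine. A hedged usage sketch, assuming SessionFactoryImplementor still exposes getQueryPlanCache() as it does elsewhere in this code base; the caller class is hypothetical:

    import java.util.Collections;

    import org.hibernate.Filter;
    import org.hibernate.engine.query.spi.HQLQueryPlan;
    import org.hibernate.engine.query.spi.QueryPlanCache;
    import org.hibernate.engine.spi.SessionFactoryImplementor;

    public class QueryPlanCacheUsageSketch {
        // Hypothetical caller; no filters enabled and a non-shallow execution.
        static HQLQueryPlan planFor(SessionFactoryImplementor factory, String hql) {
            QueryPlanCache planCache = factory.getQueryPlanCache();
            return planCache.getHQLQueryPlan( hql, false, Collections.<String, Filter>emptyMap() );
        }
    }
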
@@ -224,7 +278,7 @@ public class QueryPlanCache implements Serializable {
 filterKeys = Collections.emptySet();
 }
 else {
-Set<DynamicFilterKey> tmp = new HashSet<DynamicFilterKey>(
+final Set<DynamicFilterKey> tmp = new HashSet<DynamicFilterKey>(
 CollectionHelper.determineProperSizing( enabledFilters ),
 CollectionHelper.LOAD_FACTOR
 );
@@ -249,7 +303,7 @@ public class QueryPlanCache implements Serializable {
 return false;
 }

-final HQLQueryPlanKey that = ( HQLQueryPlanKey ) o;
+final HQLQueryPlanKey that = (HQLQueryPlanKey) o;

 return shallow == that.shallow
 && filterKeys.equals( that.filterKeys )
@@ -307,8 +361,7 @@ public class QueryPlanCache implements Serializable {
 return false;
 }

-DynamicFilterKey that = ( DynamicFilterKey ) o;
+final DynamicFilterKey that = (DynamicFilterKey) o;

 return filterName.equals( that.filterName )
 && parameterMetadata.equals( that.parameterMetadata );

@@ -337,7 +390,7 @@ public class QueryPlanCache implements Serializable {
 this.filterNames = Collections.emptySet();
 }
 else {
-Set<String> tmp = new HashSet<String>();
+final Set<String> tmp = new HashSet<String>();
 tmp.addAll( enabledFilters.keySet() );
 this.filterNames = Collections.unmodifiableSet( tmp );

@@ -359,8 +412,7 @@ public class QueryPlanCache implements Serializable {
 return false;
 }

-final FilterQueryPlanKey that = ( FilterQueryPlanKey ) o;
+final FilterQueryPlanKey that = (FilterQueryPlanKey) o;

 return shallow == that.shallow
 && filterNames.equals( that.filterNames )
 && query.equals( that.query )
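The equals/hashCode tweaks above all touch the value-object keys the plan cache is indexed by: identity is the query text plus whether the execution is shallow plus the enabled filter names. A generic sketch of such an immutable key, modeled on the equals bodies shown here but not the Hibernate class itself:

    import java.util.Set;

    public final class PlanKeySketch {
        private final String query;
        private final boolean shallow;
        private final Set<String> filterNames;
        private final int hashCode;

        public PlanKeySketch(String query, boolean shallow, Set<String> filterNames) {
            this.query = query;
            this.shallow = shallow;
            this.filterNames = filterNames;
            // computed once, since the key is immutable
            int result = query.hashCode();
            result = 31 * result + ( shallow ? 1 : 0 );
            result = 31 * result + filterNames.hashCode();
            this.hashCode = result;
        }

        @Override
        public boolean equals(Object o) {
            if ( this == o ) {
                return true;
            }
            if ( !( o instanceof PlanKeySketch ) ) {
                return false;
            }
            final PlanKeySketch that = (PlanKeySketch) o;
            return shallow == that.shallow
                    && filterNames.equals( that.filterNames )
                    && query.equals( that.query );
        }

        @Override
        public int hashCode() {
            return hashCode;
        }
    }
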
@@ -1,10 +1,10 @@
 /*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
-* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors. All third-party contributions are
-* distributed under license by Red Hat Middleware LLC.
+* distributed under license by Red Hat Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,21 +20,23 @@
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA 02110-1301 USA
-*
 */
 package org.hibernate.engine.query.spi;

 import java.io.Serializable;

 import org.hibernate.type.Type;

 /**
+* Metadata about the query return(s).
+*
 * @author Steve Ebersole
 */
 public class ReturnMetadata implements Serializable {
 private final String[] returnAliases;
 private final Type[] returnTypes;

-public ReturnMetadata(String[] returnAliases, Type[] returnTypes) {
+ReturnMetadata(String[] returnAliases, Type[] returnTypes) {
 this.returnAliases = returnAliases;
 this.returnTypes = returnTypes;
 }
@@ -0,0 +1,4 @@
+/**
+* Defines support for query plans and stored metadata about queries
+*/
+package org.hibernate.engine.query.spi;
@@ -52,6 +52,7 @@ import org.hibernate.action.internal.EntityDeleteAction;
 import org.hibernate.action.internal.EntityIdentityInsertAction;
 import org.hibernate.action.internal.EntityInsertAction;
 import org.hibernate.action.internal.EntityUpdateAction;
+import org.hibernate.action.internal.QueuedOperationCollectionAction;
 import org.hibernate.action.internal.UnresolvedEntityInsertActions;
 import org.hibernate.action.spi.AfterTransactionCompletionProcess;
 import org.hibernate.action.spi.BeforeTransactionCompletionProcess;
@@ -63,7 +64,7 @@ import org.hibernate.type.Type;

 /**
 * Responsible for maintaining the queue of actions related to events.
-* </p>
+*
 * The ActionQueue holds the DML operations queued as part of a session's
 * transactional-write-behind semantics. DML operations are queued here
 * until a flush forces them to be executed against the database.
@@ -91,6 +92,7 @@ public class ActionQueue {
 // just re-use the same Lists for convenience.
 private ArrayList collectionCreations;
 private ArrayList collectionUpdates;
+private ArrayList collectionQueuedOps;
 private ArrayList collectionRemovals;

 private AfterTransactionCompletionProcessQueue afterTransactionProcesses;
@@ -115,6 +117,7 @@ public class ActionQueue {
 collectionCreations = new ArrayList( INIT_QUEUE_LIST_SIZE );
 collectionRemovals = new ArrayList( INIT_QUEUE_LIST_SIZE );
 collectionUpdates = new ArrayList( INIT_QUEUE_LIST_SIZE );
+collectionQueuedOps = new ArrayList( INIT_QUEUE_LIST_SIZE );

 afterTransactionProcesses = new AfterTransactionCompletionProcessQueue( session );
 beforeTransactionProcesses = new BeforeTransactionCompletionProcessQueue( session );
@@ -128,6 +131,7 @@ public class ActionQueue {
 collectionCreations.clear();
 collectionRemovals.clear();
 collectionUpdates.clear();
+collectionQueuedOps.clear();

 unresolvedInsertions.clear();
 }
@@ -163,6 +167,11 @@ public class ActionQueue {
 collectionUpdates.add( action );
 }

+@SuppressWarnings({ "unchecked" })
+public void addAction(QueuedOperationCollectionAction action) {
+collectionQueuedOps.add( action );
+}
+
 @SuppressWarnings({ "unchecked" })
 public void addAction(EntityIdentityInsertAction insert) {
 LOG.tracev( "Adding an EntityIdentityInsertAction for [{0}] object", insert.getEntityName() );
@@ -276,6 +285,8 @@ public class ActionQueue {
 }
 executeActions( insertions );
 executeActions( updates );
+// do before actions are handled in the other collection queues
+executeActions( collectionQueuedOps );
 executeActions( collectionRemovals );
 executeActions( collectionUpdates );
 executeActions( collectionCreations );
@@ -291,6 +302,7 @@ public class ActionQueue {
 prepareActions( collectionRemovals );
 prepareActions( collectionUpdates );
 prepareActions( collectionCreations );
+prepareActions( collectionQueuedOps );
 }

 /**
@@ -325,6 +337,7 @@ public class ActionQueue {
 areTablesToUpdated( deletions, tables ) ||
 areTablesToUpdated( collectionUpdates, tables ) ||
 areTablesToUpdated( collectionCreations, tables ) ||
+areTablesToUpdated( collectionQueuedOps, tables ) ||
 areTablesToUpdated( collectionRemovals, tables );
 }

@@ -401,6 +414,7 @@ public class ActionQueue {
 .append( " collectionCreations=" ).append( collectionCreations )
 .append( " collectionRemovals=" ).append( collectionRemovals )
 .append( " collectionUpdates=" ).append( collectionUpdates )
+.append( " collectionQueuedOps=" ).append( collectionQueuedOps )
 .append( " unresolvedInsertDependencies=" ).append( unresolvedInsertions )
 .append( "]" )
 .toString();
@@ -436,6 +450,7 @@ public class ActionQueue {
 //sort the updates by fk
 java.util.Collections.sort( collectionCreations );
 java.util.Collections.sort( collectionUpdates );
+java.util.Collections.sort( collectionQueuedOps );
 java.util.Collections.sort( collectionRemovals );
 }
 }
@@ -472,6 +487,7 @@ public class ActionQueue {
 public void clearFromFlushNeededCheck(int previousCollectionRemovalSize) {
 collectionCreations.clear();
 collectionUpdates.clear();
+collectionQueuedOps.clear();
 updates.clear();
 // collection deletions are a special case since update() can add
 // deletions of collections not loaded by the session.
@@ -495,6 +511,7 @@ public class ActionQueue {
 ! unresolvedInsertions.isEmpty() ||
 deletions.size() > 0 ||
 collectionUpdates.size() > 0 ||
+collectionQueuedOps.size() > 0 ||
 collectionRemovals.size() > 0 ||
 collectionCreations.size() > 0;
 }
@@ -564,6 +581,13 @@ public class ActionQueue {
 for ( int i = 0; i < queueSize; i++ ) {
 oos.writeObject( collectionCreations.get( i ) );
 }

+queueSize = collectionQueuedOps.size();
+LOG.tracev( "Starting serialization of [{0}] collectionQueuedOps entries", queueSize );
+oos.writeInt( queueSize );
+for ( int i = 0; i < queueSize; i++ ) {
+oos.writeObject( collectionQueuedOps.get( i ) );
+}
 }

 /**
@@ -640,6 +664,15 @@ public class ActionQueue {
 action.afterDeserialize( session );
 rtn.collectionCreations.add( action );
 }

+queueSize = ois.readInt();
+LOG.tracev( "Starting deserialization of [{0}] collectionQueuedOps entries", queueSize );
+rtn.collectionQueuedOps = new ArrayList<Executable>( queueSize );
+for ( int i = 0; i < queueSize; i++ ) {
+CollectionAction action = ( CollectionAction ) ois.readObject();
+action.afterDeserialize( session );
+rtn.collectionQueuedOps.add( action );
+}
 return rtn;
 }

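The recurring theme in this file is the new collectionQueuedOps queue, which is created, cleared, sorted and (de)serialized alongside the existing collection queues, and which is executed before the other collection queues at flush, per the new comment. A tiny illustrative summary of the resulting order of the executeActions calls shown above; it only restates the sequence, it is not the ActionQueue code:

    import java.util.Arrays;
    import java.util.List;

    public class FlushOrderSketch {
        public static void main(String[] args) {
            List<String> flushOrder = Arrays.asList(
                    "insertions",
                    "updates",
                    "collectionQueuedOps",   // new in this commit: runs before the other collection queues
                    "collectionRemovals",
                    "collectionUpdates",
                    "collectionCreations"
            );
            System.out.println( flushOrder );
        }
    }
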
@@ -32,6 +32,7 @@ import org.hibernate.HibernateException;
 import org.hibernate.action.internal.CollectionRecreateAction;
 import org.hibernate.action.internal.CollectionRemoveAction;
 import org.hibernate.action.internal.CollectionUpdateAction;
+import org.hibernate.action.internal.QueuedOperationCollectionAction;
 import org.hibernate.collection.spi.PersistentCollection;
 import org.hibernate.engine.internal.Cascade;
 import org.hibernate.engine.internal.CascadePoint;
@@ -294,6 +295,16 @@ public abstract class AbstractFlushingEventListener implements Serializable {
 )
 );
 }
+if ( !coll.wasInitialized() && coll.hasQueuedOperations() ) {
+actionQueue.addAction(
+new QueuedOperationCollectionAction(
+coll,
+ce.getLoadedPersister(),
+ce.getLoadedKey(),
+session
+)
+);
+}

 }

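The new block above queues a QueuedOperationCollectionAction only when a collection was never initialized in this session yet has operations queued against it, so those pending operations can still be written out at flush time without loading the collection. A self-contained sketch of just that guard, using stand-in types rather than the real PersistentCollection and ActionQueue:

    import java.util.ArrayList;
    import java.util.List;

    public class QueuedOperationFlushSketch {
        // Stand-in for PersistentCollection; illustrative only.
        interface TrackedCollection {
            boolean wasInitialized();
            boolean hasQueuedOperations();
        }

        static List<String> actions = new ArrayList<String>();

        static void flushCollection(TrackedCollection coll) {
            if ( !coll.wasInitialized() && coll.hasQueuedOperations() ) {
                // mirrors the new branch: pending operations get flushed without initializing the collection
                actions.add( "QueuedOperationCollectionAction" );
            }
        }

        public static void main(String[] args) {
            TrackedCollection lazyWithQueuedAdds = new TrackedCollection() {
                public boolean wasInitialized() { return false; }
                public boolean hasQueuedOperations() { return true; }
            };
            flushCollection( lazyWithQueuedAdds );
            System.out.println( actions );   // [QueuedOperationCollectionAction]
        }
    }
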
@@ -70,6 +70,7 @@ import org.hibernate.internal.util.ReflectHelper;
 import org.hibernate.internal.util.StringHelper;
 import org.hibernate.internal.util.collections.IdentitySet;
 import org.hibernate.loader.hql.QueryLoader;
+import org.hibernate.param.ParameterSpecification;
 import org.hibernate.persister.entity.Queryable;
 import org.hibernate.type.Type;

@@ -101,7 +102,7 @@ public class QueryTranslatorImpl implements FilterTranslator {
 private String sql;

 private ParameterTranslations paramTranslations;
-private List collectedParameterSpecifications;
+private List<ParameterSpecification> collectedParameterSpecifications;


 /**
@@ -570,12 +571,11 @@ public class QueryTranslatorImpl implements FilterTranslator {
 public ParameterTranslations getParameterTranslations() {
 if ( paramTranslations == null ) {
 paramTranslations = new ParameterTranslationsImpl( getWalker().getParameters() );
-// paramTranslations = new ParameterTranslationsImpl( collectedParameterSpecifications );
 }
 return paramTranslations;
 }

-public List getCollectedParameterSpecifications() {
+public List<ParameterSpecification> getCollectedParameterSpecifications() {
 return collectedParameterSpecifications;
 }

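This hunk, like the SqlGenerator, FromElement and IndexNode hunks that follow, replaces raw List fields and return types with List<ParameterSpecification>, which removes the casts at the call sites. A before/after sketch of the pattern with an illustrative element type:

    import java.util.ArrayList;
    import java.util.List;

    public class GenerifyListSketch {
        // Before: raw type, the caller has to cast each element.
        private List collectedRaw = new ArrayList();

        Object firstRaw() {
            return collectedRaw.isEmpty() ? null : collectedRaw.get( 0 );   // caller casts the result
        }

        // After: parameterized type, no cast needed and misuse is caught at compile time.
        private List<String> collected = new ArrayList<String>();

        String first() {
            return collected.isEmpty() ? null : collected.get( 0 );
        }
    }
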
@@ -76,7 +76,7 @@ public class SqlGenerator extends SqlGeneratorBase implements ErrorReporter {
 private SessionFactoryImplementor sessionFactory;
 private LinkedList<SqlWriter> outputStack = new LinkedList<SqlWriter>();
 private final ASTPrinter printer = new ASTPrinter( SqlTokenTypes.class );
-private List collectedParameters = new ArrayList();
+private List<ParameterSpecification> collectedParameters = new ArrayList<ParameterSpecification>();


 // handle trace logging ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -106,7 +106,7 @@ public class SqlGenerator extends SqlGeneratorBase implements ErrorReporter {
 LOG.trace( prefix + ruleName );
 }

-public List getCollectedParameters() {
+public List<ParameterSpecification> getCollectedParameters() {
 return collectedParameters;
 }

@@ -452,16 +452,12 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
 this.alias = alias;
 }

-/**
-* {@inheritDoc}
-*/
+@Override
 public String getSqlFragment() {
 return persisterDiscriminatorMetadata.getSqlFragment( alias );
 }

-/**
-* {@inheritDoc}
-*/
+@Override
 public Type getResolutionType() {
 return persisterDiscriminatorMetadata.getResolutionType();
 }
@@ -660,21 +656,24 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa


 // ParameterContainer impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-private List embeddedParameters;
+private List<ParameterSpecification> embeddedParameters;

+@Override
 public void addEmbeddedParameter(ParameterSpecification specification) {
 if ( embeddedParameters == null ) {
-embeddedParameters = new ArrayList();
+embeddedParameters = new ArrayList<ParameterSpecification>();
 }
 embeddedParameters.add( specification );
 }

+@Override
 public boolean hasEmbeddedParameters() {
 return embeddedParameters != null && ! embeddedParameters.isEmpty();
 }

+@Override
 public ParameterSpecification[] getEmbeddedParameters() {
-return ( ParameterSpecification[] ) embeddedParameters.toArray( new ParameterSpecification[ embeddedParameters.size() ] );
+return embeddedParameters.toArray( new ParameterSpecification[ embeddedParameters.size() ] );
 }

 public ParameterSpecification getIndexCollectionSelectorParamSpec() {
@@ -147,14 +147,14 @@ public class IndexNode extends FromReferenceNode {
 }
 String selectorExpression = gen.getSQL();
 joinSequence.addCondition( collectionTableAlias + '.' + indexCols[0] + " = " + selectorExpression );
-List paramSpecs = gen.getCollectedParameters();
+List<ParameterSpecification> paramSpecs = gen.getCollectedParameters();
 if ( paramSpecs != null ) {
 switch ( paramSpecs.size() ) {
 case 0 :
 // nothing to do
 break;
 case 1 :
-ParameterSpecification paramSpec = ( ParameterSpecification ) paramSpecs.get( 0 );
+ParameterSpecification paramSpec = paramSpecs.get( 0 );
 paramSpec.setExpectedType( queryableCollection.getIndexType() );
 fromElement.setIndexCollectionSelectorParamSpec( paramSpec );
 break;
@@ -176,39 +176,40 @@ public class IndexNode extends FromReferenceNode {
 * In the (rare?) case where the index selector contains multiple parameters...
 */
 private static class AggregatedIndexCollectionSelectorParameterSpecifications implements ParameterSpecification {
-private final List paramSpecs;
+private final List<ParameterSpecification> paramSpecs;

-public AggregatedIndexCollectionSelectorParameterSpecifications(List paramSpecs) {
+public AggregatedIndexCollectionSelectorParameterSpecifications(List<ParameterSpecification> paramSpecs) {
 this.paramSpecs = paramSpecs;
 }

+@Override
 public int bind(PreparedStatement statement, QueryParameters qp, SessionImplementor session, int position)
 throws SQLException {
 int bindCount = 0;
-Iterator itr = paramSpecs.iterator();
-while ( itr.hasNext() ) {
-final ParameterSpecification paramSpec = ( ParameterSpecification ) itr.next();
+for ( ParameterSpecification paramSpec : paramSpecs ) {
 bindCount += paramSpec.bind( statement, qp, session, position + bindCount );
 }
 return bindCount;
 }

+@Override
 public Type getExpectedType() {
 return null;
 }

+@Override
 public void setExpectedType(Type expectedType) {
 }

+@Override
 public String renderDisplayInfo() {
 return "index-selector [" + collectDisplayInfo() + "]" ;
 }

 private String collectDisplayInfo() {
 StringBuilder buffer = new StringBuilder();
-Iterator itr = paramSpecs.iterator();
-while ( itr.hasNext() ) {
-buffer.append( ( ( ParameterSpecification ) itr.next() ).renderDisplayInfo() );
+for ( ParameterSpecification paramSpec : paramSpecs ) {
+buffer.append( ( paramSpec ).renderDisplayInfo() );
 }
 return buffer.toString();
 }
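The last two hunks replace explicit Iterator loops with enhanced for loops over the now-parameterized list, which removes the per-element cast. A before/after sketch with an illustrative element type:

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class ForEachSketch {
        public static void main(String[] args) {
            List<String> paramSpecs = Arrays.asList( "a", "b", "c" );

            // Before: raw Iterator plus a cast per element.
            StringBuilder before = new StringBuilder();
            Iterator itr = paramSpecs.iterator();
            while ( itr.hasNext() ) {
                before.append( (String) itr.next() );
            }

            // After: enhanced for loop over the parameterized list.
            StringBuilder after = new StringBuilder();
            for ( String paramSpec : paramSpecs ) {
                after.append( paramSpec );
            }

            System.out.println( before + " == " + after );
        }
    }
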
Some files were not shown because too many files have changed in this diff.