Merge branch 'master' into metamodel
commit 8e8d681c7e

build.gradle

@@ -1,20 +1,25 @@
apply plugin: 'eclipse'
apply plugin: 'idea'
apply from: "./libraries.gradle"

allprojects {
repositories {
mavenCentral()
mavenLocal()
mavenRepo name: 'jboss-nexus', url: "https://repository.jboss.org/nexus/content/groups/public/"
mavenCentral()
mavenLocal()
mavenRepo name: 'jboss-nexus', url: "http://repository.jboss.org/nexus/content/groups/public/"
mavenRepo name: "jboss-snapshots", url: "http://snapshots.jboss.org/maven2/"
}
}

buildscript {
repositories {
mavenCentral()
mavenLocal()
mavenRepo name: 'jboss-nexus', url: "https://repository.jboss.org/nexus/content/groups/public/"
mavenCentral()
mavenLocal()
mavenRepo name: 'jboss-nexus', url: "http://repository.jboss.org/nexus/content/groups/public/"
mavenRepo name: "jboss-snapshots", url: "http://snapshots.jboss.org/maven2/"
}
dependencies {

@@ -22,11 +27,11 @@ buildscript {
}
}

hibernateTargetVersion = '4.2.0-SNAPSHOT'
ext.hibernateTargetVersion = '4.2.0-SNAPSHOT'

idea {
project {
jdkName = "1.6"
languageLevel = '1.6'
ipr {
withXml { provider ->
provider.node.component.find { it.@name == 'VcsDirectoryMappings' }.mapping.@vcs = 'Git'

@@ -46,64 +51,6 @@ idea {
}
}

// build a map of the dependency artifacts to use. Allows centralized definition of the version of artifacts to
// use. In that respect it serves a role similar to <dependencyManagement> in Maven
junitVersion = '4.10'
h2Version = '1.2.145'
bytemanVersion = '1.5.2'

libraries = [
// Ant
ant: 'org.apache.ant:ant:1.8.2',

// Antlr
antlr: 'antlr:antlr:2.7.7',

// Annotations
commons_annotations:
'org.hibernate.common:hibernate-commons-annotations:4.0.1.Final@jar',
jandex: 'org.jboss:jandex:1.1.0.Alpha1',
classmate: 'com.fasterxml:classmate:0.5.4',

// Dom4J
dom4j: 'dom4j:dom4j:1.6.1@jar',

// Javassist
javassist: 'org.javassist:javassist:3.15.0-GA',

// javax
jpa: 'org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Draft-7plus',
jta: 'org.jboss.spec.javax.transaction:jboss-transaction-api_1.1_spec:1.0.0.Final',
validation: 'javax.validation:validation-api:1.0.0.GA',
jacc: 'org.jboss.spec.javax.security.jacc:jboss-jacc-api_1.4_spec:1.0.0.Final',

// logging
logging: 'org.jboss.logging:jboss-logging:3.1.0.GA',
logging_processor: 'org.jboss.logging:jboss-logging-processor:1.0.0.Final',

// jaxb task
jaxb: 'com.sun.xml.bind:jaxb-xjc:2.2.5',
jaxb2_basics: 'org.jvnet.jaxb2_commons:jaxb2-basics:0.6.3',
jaxb2_ant: 'org.jvnet.jaxb2_commons:jaxb2-basics-ant:0.6.3',
jaxb2_jaxb: 'org.jvnet.jaxb2_commons:jaxb2-basics-jaxb:2.2.4-1',
jaxb2_jaxb_xjc: 'org.jvnet.jaxb2_commons:jaxb2-basics-jaxb-xjc:2.2.4-1',
// ~~~~~~~~~~~~~~~~~~~~~~~~~~ testing

// logging for testing
log4j: 'log4j:log4j:1.2.16',

junit: "junit:junit:${junitVersion}",
byteman: "org.jboss.byteman:byteman:${bytemanVersion}",
byteman_install: "org.jboss.byteman:byteman-install:${bytemanVersion}",
byteman_bmunit: "org.jboss.byteman:byteman-bmunit:${bytemanVersion}",
jpa_modelgen: 'org.hibernate:hibernate-jpamodelgen:1.2.0.Final',
shrinkwrap_api: 'org.jboss.shrinkwrap:shrinkwrap-api:1.0.0-beta-6',
shrinkwrap: 'org.jboss.shrinkwrap:shrinkwrap-impl-base:1.0.0-beta-6',
validator: 'org.hibernate:hibernate-validator:4.3.0.Final',
h2: "com.h2database:h2:${h2Version}"
]

subprojects { subProject ->
apply plugin: 'idea'
apply plugin: 'eclipse'

@@ -142,7 +89,8 @@ subprojects { subProject ->
}

}
toolsJar = file("${System.getProperty('java.home')}/../lib/tools.jar")

ext.toolsJar = file("${System.getProperty('java.home')}/../lib/tools.jar")
// appropriately inject the common dependencies into each sub-project
dependencies {
compile( libraries.logging )

@@ -150,12 +98,18 @@ subprojects { subProject ->
testCompile( libraries.byteman )
testCompile( libraries.byteman_install )
testCompile( libraries.byteman_bmunit )
testCompile files( toolsJar )
testRuntime( libraries.log4j )
testRuntime( libraries.slf4j_api )
testRuntime( libraries.slf4j_log4j12 )
testRuntime( libraries.jcl_slf4j )
testRuntime( libraries.jcl_api )
testRuntime( libraries.jcl )
testRuntime( libraries.javassist )
testRuntime( libraries.h2 )
jbossLoggingTool( libraries.logging_processor )
hibernateJpaModelGenTool( libraries.jpa_modelgen )
jaxb( libraries.jaxb ){
exclude group: "javax.xml.stream"
}
jaxb( libraries.jaxb )
jaxb( libraries.jaxb2_basics )
jaxb( libraries.jaxb2_ant )

@@ -163,16 +117,20 @@ subprojects { subProject ->
jaxb( libraries.jaxb2_jaxb_xjc )
deployerJars "org.apache.maven.wagon:wagon-http:1.0"
}

aptDumpDir = subProject.file( "${buildDir}/tmp/apt" )
if( ext.toolsJar.exists() ){
dependencies{
testCompile files( toolsJar )
}
}
ext.aptDumpDir = subProject.file( "${buildDir}/tmp/apt" )

sourceSets.main {
compileClasspath += configurations.provided
}

sourceSets.all {
originalJavaSrcDirs = java.srcDirs
generatedLoggingSrcDir = file( "${buildDir}/generated-src/logging/${name}" )
ext.originalJavaSrcDirs = java.srcDirs
ext.generatedLoggingSrcDir = file( "${buildDir}/generated-src/logging/${name}" )
java.srcDir generatedLoggingSrcDir
}

@@ -225,6 +183,8 @@ subprojects { subProject ->
maxHeapSize = "1024m"
}

processTestResources.doLast( {
copy {
from( sourceSets.test.java.srcDirs ) {

@@ -310,7 +270,7 @@ subprojects { subProject ->
}
}

subProject.basePomConfig = pomConfig
subProject.ext.basePomConfig = pomConfig

configure(install.repositories.mavenInstaller) {
pom.project pomConfig

@@ -339,7 +299,6 @@ subprojects { subProject ->
}

}

dependsOnChildren()

// This is a task that generates the gradlew scripts, allowing users to run gradle without having gradle installed

@@ -349,5 +308,5 @@ dependsOnChildren()
// 2) /gradlew.bat which is the windows bat script for for executing builds
// 3) /wrapper which is a directory named by the "jarPath" config which contains other needed files.
task wrapper(type: Wrapper) {
gradleVersion = '1.0-milestone-8a'
gradleVersion = '1.1'
}
@@ -49,11 +49,10 @@ dependencies {

idea {
project {
jdkName = "1.6"
languageLevel = '1.6'
}
module {
downloadSources = true
downloadJavadoc = true
javaVersion = '1.6'
}
}
@@ -26,7 +26,6 @@ package org.hibernate.build.gradle.testing.database;

import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.internal.artifacts.dependencies.DefaultSelfResolvingDependency;
/**
* Database profile as defined by a directory named {@code jdbc} containing JDBC drivers.
*

@@ -34,22 +33,16 @@ import org.gradle.api.internal.artifacts.dependencies.DefaultSelfResolvingDepend
* @author Strong Liu
*/
public class JdbcDirectoryProfile extends AbstractDatabaseProfileImpl {
private final Configuration jdbcDependencies;
private final Configuration jdbcDependencies;

public JdbcDirectoryProfile(File jdbcDirectory, Project project) {
super( jdbcDirectory.getParentFile(), project );
jdbcDependencies = prepareConfiguration( getName() );
DefaultSelfResolvingDependency dependency =
new DefaultSelfResolvingDependency( project.files( jdbcDirectory.listFiles() ) );
/* File [] jdbcDriverJars = jdbcDirectory.listFiles();*/
jdbcDependencies.addDependency( dependency );
/* project.dependencies {
jdbcDependency files(jdbcDriverJars)
}*/
}
public JdbcDirectoryProfile(File jdbcDirectory, Project project) {
super( jdbcDirectory.getParentFile(), project );
jdbcDependencies = prepareConfiguration( getName() );
project.dependencies.add(getName(), project.files(jdbcDirectory.listFiles()))
}

@Override
public Configuration getTestingRuntimeConfiguration() {
return jdbcDependencies;
}
@Override
public Configuration getTestingRuntimeConfiguration() {
return jdbcDependencies;
}
}
@@ -93,7 +93,7 @@ class DatabaseAllocator {

public static DatabaseAllocator locate(Project project) {
if ( ! project.rootProject.hasProperty( DB_ALLOCATOR_KEY ) ) {
project.rootProject.setProperty( DB_ALLOCATOR_KEY, new DatabaseAllocator( project.rootProject ) );
project.rootProject.ext.setProperty( DB_ALLOCATOR_KEY, new DatabaseAllocator( project.rootProject ) );
}
return (DatabaseAllocator) project.rootProject.properties[ DB_ALLOCATOR_KEY ];
}
@@ -2,18 +2,18 @@ buildscript {
repositories {
mavenCentral()
mavenLocal()
mavenRepo name: 'jboss-nexus', url: "https://repository.jboss.org/nexus/content/groups/public/"
mavenRepo name: 'jboss-nexus', url: "http://repository.jboss.org/nexus/content/groups/public/"
}
dependencies {
classpath "org.jboss.jdocbook:gradle-jdocbook:1.2.0"
classpath "org.jboss.jdocbook:gradle-jdocbook:1.2.1"
}
}

apply plugin: "java"
apply plugin: "jdocbook"

ext.pressgangVersion = '3.0.0'
dependencies {
pressgangVersion = '3.0.0'

jdocbookXsl "org.jboss.pressgang:pressgang-xslt-ns:${pressgangVersion}"
jdocbookXsl "org.jboss.pressgang:pressgang-fonts:${pressgangVersion}"
jdocbookStyles "org.jboss.pressgang:pressgang-jdocbook-style:${pressgangVersion}"
@@ -1,9 +1,57 @@
<?xml version='1.0' encoding='utf-8' ?>

<appendix xmlns="http://docbook.org/ns/docbook"
xmlns:xl="http://www.w3.org/1999/xlink">
xmlns:xlink="http://www.w3.org/1999/xlink">

<title>Configuration properties</title>

<section xml:id="config-strategy">
<title>Strategy configurations</title>
<para>
Many configuration settings define pluggable strategies that Hibernate uses for various purposes.
The configuration of many of these strategy type settings accept definition in various forms. The
documentation of such configuration settings refer here. The types of forms available in such cases
include:
</para>
<variablelist>
<varlistentry>
<term>short name (if defined)</term>
<listitem>
<para>
Certain built-in strategy implementations have a corresponding short name.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>strategy instance</term>
<listitem>
<para>
An instance of the strategy implementation to use can be specified
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>strategy Class reference</term>
<listitem>
<para>
A <classname>java.lang.Class</classname> reference of the strategy implementation to use can
be specified
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>strategy Class name</term>
<listitem>
<para>
The class name (<classname>java.lang.String</classname>) of the strategy implementation to
use can be specified
</para>
</listitem>
</varlistentry>
</variablelist>
</section>
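Editorial illustration (not part of this commit) of the four configuration forms listed in the section above. The property key and the "jdbc" short name are the ones documented in this appendix; the JdbcTransactionFactory class name appears in the transaction hunks later in this diff, and the SomeTransactionFactory stand-in class is purely hypothetical so the Class-reference and instance forms compile.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class StrategyConfigExample {
    public static void main(String[] args) {
        // form: short name (where the implementation defines one)
        Properties props = new Properties();
        props.setProperty( "hibernate.transaction.factory_class", "jdbc" );

        // form: fully-qualified class name as a String
        props.setProperty(
                "hibernate.transaction.factory_class",
                "org.hibernate.engine.transaction.internal.jdbc.JdbcTransactionFactory"
        );

        // forms: Class reference or pre-built instance, where the settings
        // container accepts Object values rather than only Strings
        Map<String, Object> settings = new HashMap<String, Object>();
        settings.put( "hibernate.transaction.factory_class", SomeTransactionFactory.class );
        settings.put( "hibernate.transaction.factory_class", new SomeTransactionFactory() );

        System.out.println( props.getProperty( "hibernate.transaction.factory_class" ) );
    }

    // hypothetical stand-in for a strategy implementation, used only in this sketch
    static class SomeTransactionFactory {
    }
}
```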

<title>Configuration properties</title>
<section>
<title>General Configuration</title>
<informaltable>

@@ -67,10 +115,9 @@
</row>
<row>
<entry>hibernate.default_entity_mode</entry>
<entry><para>One of <literal>dynamic-map</literal>, <literal>dom4j</literal>,
<literal>pojo</literal></para></entry>
<entry><para><literal>dynamic-map</literal> or <literal>pojo</literal></para></entry>
<entry>Default mode for entity representation for all sessions opened from this
<classname>SessionFactory</classname></entry>
<classname>SessionFactory</classname>, defaults to <literal>pojo</literal>.</entry>
</row>
<row>
<entry>hibernate.order_updates</entry>

@@ -229,9 +276,16 @@
<tbody>
<row>
<entry>hibernate.transaction.factory_class</entry>
<entry>A fully-qualified classname</entry>
<entry>The classname of a <classname>TransactionFactory</classname> to use with Hibernate Transaction API. The
default is <classname>JDBCTransactionFactory</classname>).</entry>
<entry>
<para><property>jdbc</property> or <property></property></para>
</entry>
<entry>
<para>
Names the <interfacename>org.hibernate.engine.transaction.spi.TransactionFactory</interfacename>
strategy implementation to use. See <xref linkend="config-strategy"/> and
<xref linkend="services-TransactionFactory"/>
</para>
</entry>
</row>
<row>
<entry>jta.UserTransaction</entry>

@@ -359,8 +413,8 @@
<note>
<para>
For information on specific configuration of Proxool, refer to the Proxool documentation available from
<link xl:href="http://proxool.sourceforge.net/" />.
<link xlink:href="http://proxool.sourceforge.net/" />.
</para>
</note>
</section>
</appendix>
</appendix>
@@ -774,14 +774,13 @@
<term>Initiator</term>
<listitem>
<para>
<classname>org.hibernate.stat.internal.StatisticsInitiator</classname>
<classname>org.hibernate.engine.transaction.internal.TransactionFactoryInitiator</classname>
</para>
<para>
Defines a <property>hibernate.stats.factory</property> setting to allow
configuring the
<interfacename>org.hibernate.stat.spi.StatisticsFactory</interfacename> to use internally
when building the actual
<interfacename>org.hibernate.stat.Statistics</interfacename> instance.
Defines a <property>hibernate.transaction.factory_class</property> setting to allow
configuring which <interfacename>TransactionFactory</interfacename> to use.
<property>hibernate.transaction.factory_class</property> follows the rules set forth
under <xref linkend="config-strategy"/>.
</para>
</listitem>
</varlistentry>

@@ -791,18 +790,20 @@
<itemizedlist>
<listitem>
<para>
<classname>org.hibernate.engine.transaction.internal.jta.CMTTransactionFactory</classname> -
A JTA-based strategy in which Hibernate is not controlling the transactions. An
important distinction here is that interaction with the underlying JTA implementation
is done through the
<interfacename>javax.transaction.TransactionManager</interfacename>
<classname>org.hibernate.engine.transaction.internal.jdbc.JdbcTransactionFactory</classname> -
A non-JTA strategy in which the transactions are managed using the JDBC
<interfacename>java.sql.Connection</interfacename>. This implementation's short
name is <property>jdbc</property>.
</para>
</listitem>
<listitem>
<para>
<classname>org.hibernate.engine.transaction.internal.jdbc.JdbcTransactionFactory</classname> -
A non-JTA strategy in which the transactions are managed using the JDBC
<interfacename>java.sql.Connection</interfacename>
<classname>org.hibernate.engine.transaction.internal.jta.CMTTransactionFactory</classname> -
A JTA-based strategy in which Hibernate is not controlling the transactions. An
important distinction here is that interaction with the underlying JTA implementation
is done through the
<interfacename>javax.transaction.TransactionManager</interfacename>. This
implementation's short name is <property>cmt</property>.
</para>
</listitem>
<listitem>

@@ -811,7 +812,8 @@
A JTA-based strategy in which Hibernate *may* be controlling the transactions. An
important distinction here is that interaction with the underlying JTA
implementation is done through the
<interfacename>javax.transaction.UserTransaction</interfacename>
<interfacename>javax.transaction.UserTransaction</interfacename>. This
implementation's short name is <property>jta</property>.
</para>
</listitem>
</itemizedlist>
@@ -65,7 +65,6 @@
<xi:include href="content/query_criteria.xml" xmlns:xi="http://www.w3.org/2001/XInclude" />
<xi:include href="content/query_sql.xml" xmlns:xi="http://www.w3.org/2001/XInclude" />
<xi:include href="content/filters.xml" xmlns:xi="http://www.w3.org/2001/XInclude" />
<xi:include href="content/xml.xml" xmlns:xi="http://www.w3.org/2001/XInclude" />

<xi:include href="content/performance.xml" xmlns:xi="http://www.w3.org/2001/XInclude" />
@@ -309,7 +309,7 @@ hibernate.dialect = org.hibernate.dialect.PostgreSQL82Dialect</programlisting>
above.</para>
</warning></para>

<table frame="topbot" xml:id="configuration-optional-properties" revision="8">
<table frame="topbot" xml:id="configuration-optional-properties" revision="9">
<title>Hibernate Configuration Properties</title>

<tgroup cols="2">

@@ -408,9 +408,10 @@ hibernate.dialect = org.hibernate.dialect.PostgreSQL82Dialect</programlisting>
<entry><property>hibernate.default_entity_mode</property></entry>

<entry>Sets a default mode for entity representation for all
sessions opened from this <literal>SessionFactory</literal> <para>
<literal>dynamic-map</literal>, <literal>dom4j</literal>,
<literal>pojo</literal> </para></entry>
sessions opened from this <literal>SessionFactory</literal>,
defaults to <literal>pojo</literal>.<para>
<emphasis role="strong">e.g.</emphasis> <literal>dynamic-map</literal> |
<literal>pojo</literal> </para> </entry>
</row>

<row>
@@ -322,7 +322,7 @@ public class DomesticCat extends Cat {
key.</para>
</section>

<section xml:id="persistent-classes-dynamicmodels">
<section xml:id="persistent-classes-dynamicmodels" revision="1">
<title>Dynamic models</title>

<note>

@@ -335,8 +335,8 @@ public class DomesticCat extends Cat {
<para>Persistent entities do not necessarily have to be represented as
POJO classes or as JavaBean objects at runtime. Hibernate also supports
dynamic models (using <literal>Map</literal>s of <literal>Map</literal>s
at runtime) and the representation of entities as DOM4J trees. With this
approach, you do not write persistent classes, only mapping files.</para>
at runtime). With this approach, you do not write persistent classes,
only mapping files.</para>

<para>By default, Hibernate works in normal POJO mode. You can set a
default entity representation mode for a particular

@@ -444,9 +444,6 @@ dynamicSession.close()
call <literal>flush()</literal> and <literal>close()</literal> on the
secondary <literal>Session</literal>, and also leave the transaction and
connection handling to the primary unit of work.</para>

<para>More information about the XML representation capabilities can be
found in <xref linkend="xml" />.</para>
</section>
@@ -1,6 +1,6 @@
<?xml version='1.0' encoding="UTF-8"?>

<chapter xml:id="transactions" revision="2" xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink">
<chapter xml:id="transactions" xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink">
<title>Transactions and Concurrency</title>

<para>
@ -1,288 +0,0 @@
|
|||
<?xml version='1.0' encoding="UTF-8"?>
|
||||
|
||||
<chapter xml:id="xml" xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<title>XML Mapping</title>
|
||||
|
||||
<para><emphasis>
|
||||
XML Mapping is an experimental feature in Hibernate 3.0 and is currently under
|
||||
active development.
|
||||
</emphasis></para>
|
||||
|
||||
<section xml:id="xml-intro" revision="1">
|
||||
<title>Working with XML data</title>
|
||||
|
||||
<para>
|
||||
Hibernate allows you to work with persistent XML data in much the same way
|
||||
you work with persistent POJOs. A parsed XML tree can be thought of
|
||||
as another way of representing the relational data at the object level,
|
||||
instead of POJOs.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Hibernate supports dom4j as API for manipulating XML trees. You can write
|
||||
queries that retrieve dom4j trees from the database and have any
|
||||
modification you make to the tree automatically synchronized to the
|
||||
database. You can even take an XML document, parse it using dom4j, and
|
||||
write it to the database with any of Hibernate's basic operations:
|
||||
<literal>persist(), saveOrUpdate(), merge(), delete(), replicate()</literal>
|
||||
(merging is not yet supported).
|
||||
</para>
|
||||
|
||||
<para>
|
||||
This feature has many applications including data import/export,
|
||||
externalization of entity data via JMS or SOAP and XSLT-based reporting.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A single mapping can be used to simultaneously map properties of a class
|
||||
and nodes of an XML document to the database, or, if there is no class to map,
|
||||
it can be used to map just the XML.
|
||||
</para>
|
||||
|
||||
<section xml:id="xml-intro-mapping">
|
||||
<title>Specifying XML and class mapping together</title>
|
||||
|
||||
<para>
|
||||
Here is an example of mapping a POJO and XML simultaneously:
|
||||
</para>
|
||||
|
||||
<programlisting role="XML"><![CDATA[<class name="Account"
|
||||
table="ACCOUNTS"
|
||||
node="account">
|
||||
|
||||
<id name="accountId"
|
||||
column="ACCOUNT_ID"
|
||||
node="@id"/>
|
||||
|
||||
<many-to-one name="customer"
|
||||
column="CUSTOMER_ID"
|
||||
node="customer/@id"
|
||||
embed-xml="false"/>
|
||||
|
||||
<property name="balance"
|
||||
column="BALANCE"
|
||||
node="balance"/>
|
||||
|
||||
...
|
||||
|
||||
</class>]]></programlisting>
|
||||
</section>
|
||||
|
||||
<section xml:id="xml-onlyxml">
|
||||
<title>Specifying only an XML mapping</title>
|
||||
|
||||
<para>
|
||||
Here is an example where there is no POJO class:
|
||||
</para>
|
||||
|
||||
<programlisting role="XML"><![CDATA[<class entity-name="Account"
|
||||
table="ACCOUNTS"
|
||||
node="account">
|
||||
|
||||
<id name="id"
|
||||
column="ACCOUNT_ID"
|
||||
node="@id"
|
||||
type="string"/>
|
||||
|
||||
<many-to-one name="customerId"
|
||||
column="CUSTOMER_ID"
|
||||
node="customer/@id"
|
||||
embed-xml="false"
|
||||
entity-name="Customer"/>
|
||||
|
||||
<property name="balance"
|
||||
column="BALANCE"
|
||||
node="balance"
|
||||
type="big_decimal"/>
|
||||
|
||||
...
|
||||
|
||||
</class>]]></programlisting>
|
||||
|
||||
<para>
|
||||
This mapping allows you to access the data as a dom4j tree, or as a graph of
|
||||
property name/value pairs or java <literal>Map</literal>s. The property names
|
||||
are purely logical constructs that can be referred to in HQL queries.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="xml-mapping" revision="1">
|
||||
<title>XML mapping metadata</title>
|
||||
|
||||
<para>
|
||||
A range of Hibernate mapping elements accept the <literal>node</literal> attribute.
|
||||
This lets you specify the name of an XML attribute or element that holds the
|
||||
property or entity data. The format of the <literal>node</literal> attribute
|
||||
must be one of the following:
|
||||
</para>
|
||||
|
||||
<itemizedlist spacing="compact">
|
||||
<listitem>
|
||||
<para><literal>"element-name"</literal>: map to the named XML element</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para><literal>"@attribute-name"</literal>: map to the named XML attribute</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para><literal>"."</literal>: map to the parent element</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>
|
||||
<literal>"element-name/@attribute-name"</literal>:
|
||||
map to the named attribute of the named element
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<para>
|
||||
For collections and single valued associations, there is an additional
|
||||
<literal>embed-xml</literal> attribute. If <literal>embed-xml="true"</literal>,
|
||||
the default, the XML tree for the associated entity (or collection of value type)
|
||||
will be embedded directly in the XML tree for the entity that owns the association.
|
||||
Otherwise, if <literal>embed-xml="false"</literal>, then only the referenced
|
||||
identifier value will appear in the XML for single point associations and
|
||||
collections will not appear at all.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Do not leave <literal>embed-xml="true"</literal> for
|
||||
too many associations, since XML does not deal well with circularity.
|
||||
</para>
|
||||
|
||||
<programlisting role="XML"><![CDATA[<class name="Customer"
|
||||
table="CUSTOMER"
|
||||
node="customer">
|
||||
|
||||
<id name="id"
|
||||
column="CUST_ID"
|
||||
node="@id"/>
|
||||
|
||||
<map name="accounts"
|
||||
node="."
|
||||
embed-xml="true">
|
||||
<key column="CUSTOMER_ID"
|
||||
not-null="true"/>
|
||||
<map-key column="SHORT_DESC"
|
||||
node="@short-desc"
|
||||
type="string"/>
|
||||
<one-to-many entity-name="Account"
|
||||
embed-xml="false"
|
||||
node="account"/>
|
||||
</map>
|
||||
|
||||
<component name="name"
|
||||
node="name">
|
||||
<property name="firstName"
|
||||
node="first-name"/>
|
||||
<property name="initial"
|
||||
node="initial"/>
|
||||
<property name="lastName"
|
||||
node="last-name"/>
|
||||
</component>
|
||||
|
||||
...
|
||||
|
||||
</class>]]></programlisting>
|
||||
|
||||
<para>
|
||||
In this case, the collection of account ids is embedded, but not
|
||||
the actual account data. The following HQL query:
|
||||
</para>
|
||||
|
||||
<programlisting><![CDATA[from Customer c left join fetch c.accounts where c.lastName like :lastName]]></programlisting>
|
||||
|
||||
<para>
|
||||
would return datasets such as this:
|
||||
</para>
|
||||
|
||||
<programlisting role="XML"><![CDATA[<customer xml:id="123456789">
|
||||
<account short-desc="Savings">987632567</account>
|
||||
<account short-desc="Credit Card">985612323</account>
|
||||
<name>
|
||||
<first-name>Gavin</first-name>
|
||||
<initial>A</initial>
|
||||
<last-name>King</last-name>
|
||||
</name>
|
||||
...
|
||||
</customer>]]></programlisting>
|
||||
|
||||
<para>
|
||||
If you set <literal>embed-xml="true"</literal> on the <literal><one-to-many></literal>
|
||||
mapping, the data might look more like this:
|
||||
</para>
|
||||
|
||||
<programlisting role="XML"><![CDATA[<customer xml:id="123456789">
|
||||
<account xml:id="987632567" short-desc="Savings">
|
||||
<customer xml:id="123456789"/>
|
||||
<balance>100.29</balance>
|
||||
</account>
|
||||
<account xml:id="985612323" short-desc="Credit Card">
|
||||
<customer xml:id="123456789"/>
|
||||
<balance>-2370.34</balance>
|
||||
</account>
|
||||
<name>
|
||||
<first-name>Gavin</first-name>
|
||||
<initial>A</initial>
|
||||
<last-name>King</last-name>
|
||||
</name>
|
||||
...
|
||||
</customer>]]></programlisting>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="xml-manipulation" revision="1">
|
||||
<title>Manipulating XML data</title>
|
||||
|
||||
<para>
|
||||
You can also re-read and update XML documents in the application. You can do this by
|
||||
obtaining a dom4j session:
|
||||
</para>
|
||||
|
||||
<programlisting role="JAVA"><![CDATA[Document doc = ....;
|
||||
|
||||
Session session = factory.openSession();
|
||||
Session dom4jSession = session.getSession(EntityMode.DOM4J);
|
||||
Transaction tx = session.beginTransaction();
|
||||
|
||||
List results = dom4jSession
|
||||
.createQuery("from Customer c left join fetch c.accounts where c.lastName like :lastName")
|
||||
.list();
|
||||
for ( int i=0; i<results.size(); i++ ) {
|
||||
//add the customer data to the XML document
|
||||
Element customer = (Element) results.get(i);
|
||||
doc.add(customer);
|
||||
}
|
||||
|
||||
tx.commit();
|
||||
session.close();]]></programlisting>
|
||||
|
||||
<programlisting role="JAVA"><![CDATA[Session session = factory.openSession();
|
||||
Session dom4jSession = session.getSession(EntityMode.DOM4J);
|
||||
Transaction tx = session.beginTransaction();
|
||||
|
||||
Element cust = (Element) dom4jSession.get("Customer", customerId);
|
||||
for ( int i=0; i<results.size(); i++ ) {
|
||||
Element customer = (Element) results.get(i);
|
||||
//change the customer name in the XML and database
|
||||
Element name = customer.element("name");
|
||||
name.element("first-name").setText(firstName);
|
||||
name.element("initial").setText(initial);
|
||||
name.element("last-name").setText(lastName);
|
||||
}
|
||||
|
||||
tx.commit();
|
||||
session.close();]]></programlisting>
|
||||
|
||||
<para>
|
||||
When implementing XML-based data import/export, it is useful to combine this feature with Hibernate's <literal>replicate()</literal>
|
||||
operation.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
</chapter>
|
||||
|
|
@@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=http\://services.gradle.org/distributions/gradle-1.0-milestone-8a-bin.zip
distributionUrl=http\://services.gradle.org/distributions/gradle-1.1-bin.zip
@@ -1,9 +1,7 @@
apply plugin: 'java'

dependencies {
provided( libraries.validation )
compile project( ':hibernate-core' )
compile "c3p0:c3p0:0.9.1"
compile( libraries.c3p0 )

testCompile( libraries.validator ) {
// for test runtime
@@ -1,4 +1,3 @@
apply plugin: 'java'
apply plugin: 'antlr'
apply plugin: org.hibernate.build.gradle.inject.InjectionPlugin
apply plugin: org.hibernate.build.gradle.testing.matrix.MatrixTestingPlugin

@@ -21,6 +20,9 @@ dependencies {

testCompile( project(':hibernate-testing') )
testCompile( libraries.validation )
testCompile( libraries.jandex )
testCompile( libraries.classmate )
testCompile( libraries.mockito )
testCompile( libraries.validator ) {
// for test runtime
transitive = true

@@ -36,7 +38,7 @@ manifest.mainAttributes(
)

sourceSets.main {
jaxbTargetDir = file( "${buildDir}/generated-src/jaxb/main" )
ext.jaxbTargetDir = file( "${buildDir}/generated-src/jaxb/main" )
java.srcDir jaxbTargetDir
originalJavaSrcDirs = java.srcDirs
}

@@ -53,18 +55,20 @@ idea {
}

task jaxb {
// output directory
jaxbTargetDir = file( "${buildDir}/generated-src/jaxb/main" )
ext {
// output directory
jaxbTargetDir = file( "${buildDir}/generated-src/jaxb/main" )

// input schemas
cfgXsd = file( 'src/main/resources/org/hibernate/hibernate-configuration-4.0.xsd')
hbmXsd = file( 'src/main/resources/org/hibernate/hibernate-mapping-4.0.xsd' )
ormXsd = file( 'src/main/resources/org/hibernate/ejb/orm_2_0.xsd' )
// input schemas
cfgXsd = file( 'src/main/resources/org/hibernate/hibernate-configuration-4.0.xsd')
hbmXsd = file( 'src/main/resources/org/hibernate/hibernate-mapping-4.0.xsd' )
ormXsd = file( 'src/main/resources/org/hibernate/ejb/orm_2_0.xsd' )

// input bindings
cfgXjb = file( 'src/main/xjb/hbm-configuration-bindings.xjb' )
hbmXjb = file( 'src/main/xjb/hbm-mapping-bindings.xjb' )
ormXjb = file( 'src/main/xjb/orm-bindings.xjb' )
// input bindings
cfgXjb = file( 'src/main/xjb/hbm-configuration-bindings.xjb' )
hbmXjb = file( 'src/main/xjb/hbm-mapping-bindings.xjb' )
ormXjb = file( 'src/main/xjb/orm-bindings.xjb' )
}

// configure Gradle up-to-date checking
inputs.files( [cfgXsd, hbmXsd, ormXsd, cfgXjb, hbmXjb, ormXjb] )
@@ -36,7 +36,6 @@ import org.hibernate.internal.CoreMessageLogger;
* @author Steve Ebersole
*/
public enum MultiTenancyStrategy {

/**
* Multi-tenancy implemented by use of discriminator columns.
*/

@@ -59,19 +58,21 @@ public enum MultiTenancyStrategy {
MultiTenancyStrategy.class.getName()
);

public static MultiTenancyStrategy fromConfigValue(Object value) {
if ( value == null ) {
public boolean requiresMultiTenantConnectionProvider() {
return this == DATABASE || this == SCHEMA;
}

public static MultiTenancyStrategy determineMultiTenancyStrategy(Map properties) {
final Object strategy = properties.get( Environment.MULTI_TENANT );
if ( strategy == null ) {
return MultiTenancyStrategy.NONE;
}

if ( MultiTenancyStrategy.class.isInstance( value ) ) {
return (MultiTenancyStrategy) value;
if ( MultiTenancyStrategy.class.isInstance( strategy ) ) {
return (MultiTenancyStrategy) strategy;
}

return fromExternalName( value.toString() );
}

private static MultiTenancyStrategy fromExternalName(String strategyName) {
final String strategyName = strategy.toString();
try {
return MultiTenancyStrategy.valueOf( strategyName.toUpperCase() );
}

@@ -80,8 +81,4 @@ public enum MultiTenancyStrategy {
return MultiTenancyStrategy.NONE;
}
}

public static MultiTenancyStrategy determineMultiTenancyStrategy(Map properties) {
return fromConfigValue( properties.get( Environment.MULTI_TENANT ) );
}
}
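Editorial sketch (not part of the diff) of how the reworked enum is expected to behave, assuming the package locations used in this commit: determineMultiTenancyStrategy() accepts an enum instance, a name, or nothing, and requiresMultiTenantConnectionProvider() is true only for DATABASE and SCHEMA.

```java
import java.util.HashMap;
import java.util.Map;

import org.hibernate.MultiTenancyStrategy;
import org.hibernate.cfg.Environment;

public class MultiTenancyStrategyExample {
    public static void main(String[] args) {
        Map<String, Object> settings = new HashMap<String, Object>();

        // nothing configured -> NONE
        MultiTenancyStrategy none = MultiTenancyStrategy.determineMultiTenancyStrategy( settings );

        // configured by name; valueOf( name.toUpperCase() ) makes this case-insensitive
        settings.put( Environment.MULTI_TENANT, "schema" );
        MultiTenancyStrategy schema = MultiTenancyStrategy.determineMultiTenancyStrategy( settings );

        // only DATABASE and SCHEMA require a MultiTenantConnectionProvider
        System.out.println( none.requiresMultiTenantConnectionProvider() );   // false
        System.out.println( schema.requiresMultiTenantConnectionProvider() ); // true
    }
}
```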
@@ -37,6 +37,7 @@ import org.hibernate.event.service.spi.EventListenerGroup;
import org.hibernate.event.spi.EventType;
import org.hibernate.event.spi.PostDeleteEvent;
import org.hibernate.event.spi.PostDeleteEventListener;
import org.hibernate.event.spi.PostInsertEventListener;
import org.hibernate.event.spi.PreDeleteEvent;
import org.hibernate.event.spi.PreDeleteEventListener;
import org.hibernate.persister.entity.EntityPersister;

@@ -189,6 +190,13 @@ public final class EntityDeleteAction extends EntityAction {

@Override
protected boolean hasPostCommitEventListeners() {
return ! listenerGroup( EventType.POST_COMMIT_DELETE ).isEmpty();
final EventListenerGroup<PostDeleteEventListener> group = listenerGroup( EventType.POST_COMMIT_DELETE );
for ( PostDeleteEventListener listener : group.listeners() ) {
if ( listener.requiresPostCommitHanding( getPersister() ) ) {
return true;
}
}

return false;
}
}
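The same change is repeated for the identity-insert, insert and update actions below: the old check only asked whether any POST_COMMIT listener was registered, while the new one asks whether at least one registered listener actually requires post-commit handling for this persister. A condensed, hypothetical sketch of that predicate (the helper class and method name below are illustrative; requiresPostCommitHanding is spelled as it appears in the hunk above):

```java
import org.hibernate.event.service.spi.EventListenerGroup;
import org.hibernate.event.spi.PostDeleteEventListener;
import org.hibernate.persister.entity.EntityPersister;

final class PostCommitListenerCheck {
    // true only if at least one listener opts in for this persister
    static boolean anyListenerRequiresPostCommit(
            EventListenerGroup<PostDeleteEventListener> group,
            EntityPersister persister) {
        for ( PostDeleteEventListener listener : group.listeners() ) {
            if ( listener.requiresPostCommitHanding( persister ) ) {
                return true;
            }
        }
        return false;
    }
}
```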
@@ -116,7 +116,14 @@ public final class EntityIdentityInsertAction extends AbstractEntityInsertAction

@Override
protected boolean hasPostCommitEventListeners() {
return ! listenerGroup( EventType.POST_COMMIT_INSERT ).isEmpty();
final EventListenerGroup<PostInsertEventListener> group = listenerGroup( EventType.POST_COMMIT_INSERT );
for ( PostInsertEventListener listener : group.listeners() ) {
if ( listener.requiresPostCommitHanding( getPersister() ) ) {
return true;
}
}

return false;
}

@Override
@@ -204,7 +204,14 @@ public final class EntityInsertAction extends AbstractEntityInsertAction {

@Override
protected boolean hasPostCommitEventListeners() {
return ! listenerGroup( EventType.POST_COMMIT_INSERT ).isEmpty();
final EventListenerGroup<PostInsertEventListener> group = listenerGroup( EventType.POST_COMMIT_INSERT );
for ( PostInsertEventListener listener : group.listeners() ) {
if ( listener.requiresPostCommitHanding( getPersister() ) ) {
return true;
}
}

return false;
}

private boolean isCachePutEnabled(EntityPersister persister, SessionImplementor session) {
@@ -277,7 +277,14 @@ public final class EntityUpdateAction extends EntityAction {

@Override
protected boolean hasPostCommitEventListeners() {
return ! listenerGroup( EventType.POST_COMMIT_UPDATE ).isEmpty();
final EventListenerGroup<PostUpdateEventListener> group = listenerGroup( EventType.POST_COMMIT_UPDATE );
for ( PostUpdateEventListener listener : group.listeners() ) {
if ( listener.requiresPostCommitHanding( getPersister() ) ) {
return true;
}
}

return false;
}

@Override
@@ -30,11 +30,11 @@ import org.hibernate.internal.util.compare.EqualsHelper;
import org.hibernate.type.Type;

/**
* Allows multiple entity classes / collection roles to be
* stored in the same cache region. Also allows for composite
* Allows multiple entity classes / collection roles to be stored in the same cache region. Also allows for composite
* keys which do not properly implement equals()/hashCode().
*
* @author Gavin King
* @author Steve Ebersole
*/
public class CacheKey implements Serializable {
private final Serializable key;

@@ -64,33 +64,13 @@ public class CacheKey implements Serializable {
this.type = type;
this.entityOrRoleName = entityOrRoleName;
this.tenantId = tenantId;
this.hashCode = type.getHashCode( key, factory );
this.hashCode = calculateHashCode( type, factory );
}

@Override
public String toString() {
// Mainly for OSCache
return entityOrRoleName + '#' + key.toString();//"CacheKey#" + type.toString(key, sf);
}

@Override
public boolean equals(Object other) {
if ( this == other ) {
return true;
}
if ( hashCode != other.hashCode() || !(other instanceof CacheKey) ) {
//hashCode is part of this check since it is pre-calculated and hash must match for equals to be true
return false;
}
CacheKey that = (CacheKey) other;
return entityOrRoleName.equals( that.entityOrRoleName ) &&
type.isEqual( key, that.key ) &&
EqualsHelper.equals( tenantId, that.tenantId );
}

@Override
public int hashCode() {
return hashCode;
private int calculateHashCode(Type type, SessionFactoryImplementor factory) {
int result = type.getHashCode( key, factory );
result = 31 * result + (tenantId != null ? tenantId.hashCode() : 0);
return result;
}

public Serializable getKey() {

@@ -101,4 +81,36 @@ public class CacheKey implements Serializable {
return entityOrRoleName;
}

public String getTenantId() {
return tenantId;
}

@Override
public boolean equals(Object other) {
if ( other == null ) {
return false;
}
if ( this == other ) {
return true;
}
if ( hashCode != other.hashCode() || !( other instanceof CacheKey ) ) {
//hashCode is part of this check since it is pre-calculated and hash must match for equals to be true
return false;
}
CacheKey that = (CacheKey) other;
return EqualsHelper.equals( entityOrRoleName, that.entityOrRoleName ) &&
type.isEqual( key, that.key ) &&
EqualsHelper.equals( tenantId, that.tenantId );
}

@Override
public int hashCode() {
return hashCode;
}

@Override
public String toString() {
// Mainly for OSCache
return entityOrRoleName + '#' + key.toString();//"CacheKey#" + type.toString(key, sf);
}
}
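Editorial note, not part of the diff: the behavioural change in CacheKey is that the tenant identifier now participates in the pre-computed hash, and equals() additionally null-guards its argument. A standalone sketch of the same composition in plain Java, with hypothetical field names:

```java
public final class TenantAwareKeySketch {
    private final Object key;
    private final String tenantId;
    private final int hashCode;

    public TenantAwareKeySketch(Object key, String tenantId) {
        this.key = key;
        this.tenantId = tenantId;
        // mirrors the shape of calculateHashCode(): start from the key's hash,
        // then fold in the tenant id so equal ids in different tenants differ
        int result = key.hashCode();
        result = 31 * result + ( tenantId != null ? tenantId.hashCode() : 0 );
        this.hashCode = result;
    }

    @Override
    public int hashCode() {
        return hashCode;
    }

    @Override
    public boolean equals(Object other) {
        if ( !( other instanceof TenantAwareKeySketch ) ) {
            return false; // also covers null
        }
        TenantAwareKeySketch that = (TenantAwareKeySketch) other;
        return key.equals( that.key )
                && ( tenantId == null ? that.tenantId == null : tenantId.equals( that.tenantId ) );
    }
}
```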
@@ -23,6 +23,8 @@
*/
package org.hibernate.cache.spi;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.Arrays;

@@ -44,7 +46,7 @@ public class NaturalIdCacheKey implements Serializable {
private final String entityName;
private final String tenantId;
private final int hashCode;
private final transient ValueHolder<String> toString;
private transient ValueHolder<String> toString;

/**
* Construct a new key for a caching natural identifier resolutions into the second level cache.

@@ -73,7 +75,8 @@ public class NaturalIdCacheKey implements Serializable {
result = prime * result + ( ( this.entityName == null ) ? 0 : this.entityName.hashCode() );
result = prime * result + ( ( this.tenantId == null ) ? 0 : this.tenantId.hashCode() );
for ( int i = 0; i < naturalIdValues.length; i++ ) {
final Type type = propertyTypes[naturalIdPropertyIndexes[i]];
final int naturalIdPropertyIndex = naturalIdPropertyIndexes[i];
final Type type = propertyTypes[naturalIdPropertyIndex];
final Object value = naturalIdValues[i];

result = prime * result + (value != null ? type.getHashCode( value, factory ) : 0);

@@ -82,25 +85,29 @@ public class NaturalIdCacheKey implements Serializable {
}

this.hashCode = result;
this.toString = new ValueHolder<String>(
new ValueHolder.DeferredInitializer<String>() {
@Override
public String initialize() {
//Complex toString is needed as naturalIds for entities are not simply based on a single value like primary keys
//the only same way to differentiate the keys is to included the disassembled values in the string.
final StringBuilder toStringBuilder = new StringBuilder( entityName ).append( "##NaturalId[" );
for ( int i = 0; i < naturalIdValues.length; i++ ) {
toStringBuilder.append( naturalIdValues[i] );
if ( i + 1 < naturalIdValues.length ) {
toStringBuilder.append( ", " );
}
}
toStringBuilder.append( "]" );
initTransients();
}

private void initTransients() {
this.toString = new ValueHolder<String>(
new ValueHolder.DeferredInitializer<String>() {
@Override
public String initialize() {
//Complex toString is needed as naturalIds for entities are not simply based on a single value like primary keys
//the only same way to differentiate the keys is to included the disassembled values in the string.
final StringBuilder toStringBuilder = new StringBuilder( entityName ).append( "##NaturalId[" );
for ( int i = 0; i < naturalIdValues.length; i++ ) {
toStringBuilder.append( naturalIdValues[i] );
if ( i + 1 < naturalIdValues.length ) {
toStringBuilder.append( ", " );
}
}
toStringBuilder.append( "]" );

return toStringBuilder.toString();
}
}
);
return toStringBuilder.toString();
}
}
);
}

@SuppressWarnings( {"UnusedDeclaration"})

@@ -130,18 +137,27 @@

@Override
public boolean equals(Object o) {
if ( o == null ) {
return false;
}
if ( this == o ) {
return true;
}

if ( hashCode != o.hashCode() || !(o instanceof NaturalIdCacheKey) ) {

if ( hashCode != o.hashCode() || !( o instanceof NaturalIdCacheKey ) ) {
//hashCode is part of this check since it is pre-calculated and hash must match for equals to be true
return false;
}

final NaturalIdCacheKey other = (NaturalIdCacheKey) o;
return entityName.equals( other.entityName )
return EqualsHelper.equals( entityName, other.entityName )
&& EqualsHelper.equals( tenantId, other.tenantId )
&& Arrays.deepEquals( this.naturalIdValues, other.naturalIdValues );
}

private void readObject(ObjectInputStream ois)
throws ClassNotFoundException, IOException {
ois.defaultReadObject();
initTransients();
}
}
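Editorial note, not part of the diff: the NaturalIdCacheKey change applies the usual Java idiom for transient, lazily built state on a Serializable class. The cached toString holder loses its final modifier, its construction is factored into initTransients(), and readObject() re-runs that initialisation after default deserialization. A generic sketch of the idiom with illustrative names:

```java
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;

public class LazyToStringKey implements Serializable {
    private final String name;
    private transient String cachedToString; // not written during serialization

    public LazyToStringKey(String name) {
        this.name = name;
        initTransients();
    }

    private void initTransients() {
        // rebuild whatever state cannot travel with the serialized form
        this.cachedToString = "Key[" + name + "]";
    }

    private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
        ois.defaultReadObject();
        initTransients(); // re-create the transient cache, as NaturalIdCacheKey now does
    }

    @Override
    public String toString() {
        return cachedToString;
    }
}
```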
@@ -285,7 +285,7 @@ public abstract class AbstractPropertyHolder implements PropertyHolder {
Map<String, JoinTable> currentJoinTableOverride = buildJoinTableOverride( current, getPath() );
currentOverride.putAll( columnOverride ); //subclasses have precedence over superclasses
currentJoinOverride.putAll( joinColumnOverride ); //subclasses have precedence over superclasses
currentJoinOverride.putAll( joinColumnOverride ); //subclasses have precedence over superclasses
currentJoinTableOverride.putAll( joinTableOverride ); //subclasses have precedence over superclasses
columnOverride = currentOverride;
joinColumnOverride = currentJoinOverride;
joinTableOverride = currentJoinTableOverride;
@@ -2496,6 +2496,7 @@ public final class AnnotationBinder {
value.setPersistentClassName( persistentClassName );
value.setMappings( mappings );
value.setType( inferredData.getProperty(), inferredData.getClassOrElement() );
value.setAccessType( propertyAccessor );
id = value.make();
}
rootClass.setIdentifier( id );
@@ -642,4 +642,5 @@ public interface AvailableSettings {
public static final String SCHEMA_MANAGEMENT_TOOL = "hibernate.schema_management_tool";
// todo : add to Environment
String SCHEMA_NAME_RESOLVER = "hibernate.schema_name_resolver";
public static final String ENABLE_LAZY_LOAD_NO_TRANS = "hibernate.enable_lazy_load_no_trans";
}
@ -115,6 +115,136 @@ public class BinderHelper {
|
|||
return clone;
|
||||
}
|
||||
|
||||
// This is sooooooooo close in terms of not generating a synthetic property if we do not have to (where property ref
|
||||
// refers to a single property). The sticking point is cases where the `referencedPropertyName` come from subclasses
|
||||
// or secondary tables. Part of the problem is in PersistentClass itself during attempts to resolve the referenced
|
||||
// property; currently it only considers non-subclass and non-joined properties. Part of the problem is in terms
|
||||
// of SQL generation.
|
||||
// public static void createSyntheticPropertyReference(
|
||||
// Ejb3JoinColumn[] columns,
|
||||
// PersistentClass ownerEntity,
|
||||
// PersistentClass associatedEntity,
|
||||
// Value value,
|
||||
// boolean inverse,
|
||||
// Mappings mappings) {
|
||||
// //associated entity only used for more precise exception, yuk!
|
||||
// if ( columns[0].isImplicit() || StringHelper.isNotEmpty( columns[0].getMappedBy() ) ) return;
|
||||
// int fkEnum = Ejb3JoinColumn.checkReferencedColumnsType( columns, ownerEntity, mappings );
|
||||
// PersistentClass associatedClass = columns[0].getPropertyHolder() != null ?
|
||||
// columns[0].getPropertyHolder().getPersistentClass() :
|
||||
// null;
|
||||
// if ( Ejb3JoinColumn.NON_PK_REFERENCE == fkEnum ) {
|
||||
// //find properties associated to a certain column
|
||||
// Object columnOwner = findColumnOwner( ownerEntity, columns[0].getReferencedColumn(), mappings );
|
||||
// List<Property> properties = findPropertiesByColumns( columnOwner, columns, mappings );
|
||||
//
|
||||
// if ( properties == null ) {
|
||||
// //TODO use a ToOne type doing a second select
|
||||
// StringBuilder columnsList = new StringBuilder();
|
||||
// columnsList.append( "referencedColumnNames(" );
|
||||
// for (Ejb3JoinColumn column : columns) {
|
||||
// columnsList.append( column.getReferencedColumn() ).append( ", " );
|
||||
// }
|
||||
// columnsList.setLength( columnsList.length() - 2 );
|
||||
// columnsList.append( ") " );
|
||||
//
|
||||
// if ( associatedEntity != null ) {
|
||||
// //overidden destination
|
||||
// columnsList.append( "of " )
|
||||
// .append( associatedEntity.getEntityName() )
|
||||
// .append( "." )
|
||||
// .append( columns[0].getPropertyName() )
|
||||
// .append( " " );
|
||||
// }
|
||||
// else {
|
||||
// if ( columns[0].getPropertyHolder() != null ) {
|
||||
// columnsList.append( "of " )
|
||||
// .append( columns[0].getPropertyHolder().getEntityName() )
|
||||
// .append( "." )
|
||||
// .append( columns[0].getPropertyName() )
|
||||
// .append( " " );
|
||||
// }
|
||||
// }
|
||||
// columnsList.append( "referencing " )
|
||||
// .append( ownerEntity.getEntityName() )
|
||||
// .append( " not mapped to a single property" );
|
||||
// throw new AnnotationException( columnsList.toString() );
|
||||
// }
|
||||
//
|
||||
// final String referencedPropertyName;
|
||||
//
|
||||
// if ( properties.size() == 1 ) {
|
||||
// referencedPropertyName = properties.get(0).getName();
|
||||
// }
|
||||
// else {
|
||||
// // Create a synthetic (embedded composite) property to use as the referenced property which
|
||||
// // contains all the properties mapped to the referenced columns. We need to make a shallow copy
|
||||
// // of the properties to mark them as non-insertable/updatable.
|
||||
//
|
||||
// // todo : what if the columns all match with an existing component?
|
||||
//
|
||||
// StringBuilder propertyNameBuffer = new StringBuilder( "_" );
|
||||
// propertyNameBuffer.append( associatedClass.getEntityName().replace( '.', '_' ) );
|
||||
// propertyNameBuffer.append( "_" ).append( columns[0].getPropertyName() );
|
||||
// String syntheticPropertyName = propertyNameBuffer.toString();
|
||||
// //create an embeddable component
|
||||
//
|
||||
// //todo how about properties.size() == 1, this should be much simpler
|
||||
// Component embeddedComp = columnOwner instanceof PersistentClass ?
|
||||
// new Component( mappings, (PersistentClass) columnOwner ) :
|
||||
// new Component( mappings, (Join) columnOwner );
|
||||
// embeddedComp.setEmbedded( true );
|
||||
// embeddedComp.setNodeName( syntheticPropertyName );
|
||||
// embeddedComp.setComponentClassName( embeddedComp.getOwner().getClassName() );
|
||||
// for (Property property : properties) {
|
||||
// Property clone = BinderHelper.shallowCopy( property );
|
||||
// clone.setInsertable( false );
|
||||
// clone.setUpdateable( false );
|
||||
// clone.setNaturalIdentifier( false );
|
||||
// clone.setGeneration( property.getGeneration() );
|
||||
// embeddedComp.addProperty( clone );
|
||||
// }
|
||||
// SyntheticProperty synthProp = new SyntheticProperty();
|
||||
// synthProp.setName( syntheticPropertyName );
|
||||
// synthProp.setNodeName( syntheticPropertyName );
|
||||
// synthProp.setPersistentClass( ownerEntity );
|
||||
// synthProp.setUpdateable( false );
|
||||
// synthProp.setInsertable( false );
|
||||
// synthProp.setValue( embeddedComp );
|
||||
// synthProp.setPropertyAccessorName( "embedded" );
|
||||
// ownerEntity.addProperty( synthProp );
|
||||
// //make it unique
|
||||
// TableBinder.createUniqueConstraint( embeddedComp );
|
||||
//
|
||||
// referencedPropertyName = syntheticPropertyName;
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * creating the property ref to the new synthetic property
|
||||
// */
|
||||
// if ( value instanceof ToOne ) {
|
||||
// ( (ToOne) value ).setReferencedPropertyName( referencedPropertyName );
|
||||
// mappings.addUniquePropertyReference( ownerEntity.getEntityName(), referencedPropertyName );
|
||||
// }
|
||||
// else if ( value instanceof Collection ) {
|
||||
// ( (Collection) value ).setReferencedPropertyName( referencedPropertyName );
|
||||
// //not unique because we could create a mtm wo association table
|
||||
// mappings.addPropertyReference( ownerEntity.getEntityName(), referencedPropertyName );
|
||||
// }
|
||||
// else {
|
||||
// throw new AssertionFailure(
|
||||
// "Do a property ref on an unexpected Value type: "
|
||||
// + value.getClass().getName()
|
||||
// );
|
||||
// }
|
||||
// mappings.addPropertyReferencedAssociation(
|
||||
// ( inverse ? "inverse__" : "" ) + associatedClass.getEntityName(),
|
||||
// columns[0].getPropertyName(),
|
||||
// referencedPropertyName
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
|
||||
public static void createSyntheticPropertyReference(
Ejb3JoinColumn[] columns,
PersistentClass ownerEntity,

@@ -138,15 +268,15 @@ public class BinderHelper {
*/
StringBuilder propertyNameBuffer = new StringBuilder( "_" );
propertyNameBuffer.append( associatedClass.getEntityName().replace( '.', '_' ) );
propertyNameBuffer.append( "_" ).append( columns[0].getPropertyName() );
propertyNameBuffer.append( "_" ).append( columns[0].getPropertyName().replace( '.', '_' ) );
String syntheticPropertyName = propertyNameBuffer.toString();
//find properties associated to a certain column
Object columnOwner = findColumnOwner( ownerEntity, columns[0].getReferencedColumn(), mappings );
List<Property> properties = findPropertiesByColumns( columnOwner, columns, mappings );
//create an embeddable component
Property synthProp = null;
Property synthProp = null;
if ( properties != null ) {
//todo how about properties.size() == 1, this should be much simpler
//todo how about properties.size() == 1, this should be much simpler
Component embeddedComp = columnOwner instanceof PersistentClass ?
new Component( mappings, (PersistentClass) columnOwner ) :
new Component( mappings, (Join) columnOwner );

@@ -160,8 +290,8 @@ public class BinderHelper {
clone.setNaturalIdentifier( false );
clone.setGeneration( property.getGeneration() );
embeddedComp.addProperty( clone );
}
synthProp = new SyntheticProperty();
}
synthProp = new SyntheticProperty();
synthProp.setName( syntheticPropertyName );
synthProp.setNodeName( syntheticPropertyName );
synthProp.setPersistentClass( ownerEntity );

@@ -170,9 +300,9 @@ public class BinderHelper {
synthProp.setValue( embeddedComp );
synthProp.setPropertyAccessorName( "embedded" );
ownerEntity.addProperty( synthProp );
//make it unique
//make it unique
TableBinder.createUniqueConstraint( embeddedComp );
}
}
else {
//TODO use a ToOne type doing a second select
StringBuilder columnsList = new StringBuilder();

@@ -700,7 +830,7 @@ public class BinderHelper {
for (int i = 0; i < aliases.length; i++){
if (StringHelper.isNotEmpty(aliases[i].table())){
ret.put(aliases[i].alias(), aliases[i].table());
}
}
}
return ret;
}
@ -1345,18 +1345,7 @@ public class Configuration implements Serializable {
|
|||
metadataSourceQueue.processMetadata( determineMetadataSourcePrecedence() );
|
||||
}
|
||||
|
||||
// process cache queue
|
||||
{
|
||||
for ( CacheHolder holder : caches ) {
|
||||
if ( holder.isClass ) {
|
||||
applyCacheConcurrencyStrategy( holder );
|
||||
}
|
||||
else {
|
||||
applyCollectionCacheConcurrencyStrategy( holder );
|
||||
}
|
||||
}
|
||||
caches.clear();
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
inSecondPass = true;
|
||||
|
@ -1376,6 +1365,19 @@ public class Configuration implements Serializable {
|
|||
throw ( RuntimeException ) e.getCause();
|
||||
}
|
||||
|
||||
// process cache queue
|
||||
{
|
||||
for ( CacheHolder holder : caches ) {
|
||||
if ( holder.isClass ) {
|
||||
applyCacheConcurrencyStrategy( holder );
|
||||
}
|
||||
else {
|
||||
applyCollectionCacheConcurrencyStrategy( holder );
|
||||
}
|
||||
}
|
||||
caches.clear();
|
||||
}
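
Moving the cache queue below the second-pass processing (the block added above) presumably guarantees that the class and collection bindings targeted by the queued CacheHolder entries already exist when the concurrency strategies are applied. The queue itself is normally populated through the public Configuration API; a hedged example (entity and role names are hypothetical):

	Configuration cfg = new Configuration();
	// each call queues an entry that applyCacheConcurrencyStrategy() /
	// applyCollectionCacheConcurrencyStrategy() later consumes
	cfg.setCacheConcurrencyStrategy( "com.acme.Order", "read-write" );
	cfg.setCollectionCacheConcurrencyStrategy( "com.acme.Order.items", "read-write" );
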
|
||||
|
||||
for ( Map.Entry<Table, List<UniqueConstraintHolder>> tableListEntry : uniqueConstraintHoldersByTable.entrySet() ) {
|
||||
final Table table = tableListEntry.getKey();
|
||||
final List<UniqueConstraintHolder> uniqueConstraints = tableListEntry.getValue();
|
||||
|
|
|
@ -47,6 +47,8 @@ import org.hibernate.internal.util.StringHelper;
|
|||
import org.hibernate.internal.util.config.ConfigurationHelper;
|
||||
import org.hibernate.service.ServiceRegistry;
|
||||
import org.hibernate.service.classloading.spi.ClassLoaderService;
|
||||
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
|
||||
import org.hibernate.service.jdbc.connections.spi.MultiTenantConnectionProvider;
|
||||
import org.hibernate.service.jta.platform.spi.JtaPlatform;
|
||||
import org.hibernate.tuple.entity.EntityTuplizerFactory;
|
||||
|
||||
|
@ -152,6 +154,12 @@ public class SettingsFactory implements Serializable {
|
|||
}
|
||||
settings.setJdbcFetchSize(statementFetchSize);
|
||||
|
||||
MultiTenancyStrategy multiTenancyStrategy = MultiTenancyStrategy.determineMultiTenancyStrategy( properties );
|
||||
if ( debugEnabled ) {
|
||||
LOG.debugf( "multi-tenancy strategy : %s", multiTenancyStrategy );
|
||||
}
|
||||
settings.setMultiTenancyStrategy( multiTenancyStrategy );
|
||||
|
||||
String releaseModeName = ConfigurationHelper.getString( Environment.RELEASE_CONNECTIONS, properties, "auto" );
|
||||
if ( debugEnabled ) {
|
||||
LOG.debugf( "Connection release mode: %s", releaseModeName );
|
||||
|
@ -162,10 +170,15 @@ public class SettingsFactory implements Serializable {
|
|||
}
|
||||
else {
|
||||
releaseMode = ConnectionReleaseMode.parse( releaseModeName );
|
||||
if ( releaseMode == ConnectionReleaseMode.AFTER_STATEMENT &&
|
||||
! jdbcServices.getConnectionProvider().supportsAggressiveRelease() ) {
|
||||
LOG.unsupportedAfterStatement();
|
||||
releaseMode = ConnectionReleaseMode.AFTER_TRANSACTION;
|
||||
if ( releaseMode == ConnectionReleaseMode.AFTER_STATEMENT ) {
|
||||
// we need to make sure the underlying JDBC connection access supports aggressive release...
|
||||
boolean supportsAggressiveRelease = multiTenancyStrategy.requiresMultiTenantConnectionProvider()
|
||||
? serviceRegistry.getService( MultiTenantConnectionProvider.class ).supportsAggressiveRelease()
|
||||
: serviceRegistry.getService( ConnectionProvider.class ).supportsAggressiveRelease();
|
||||
if ( ! supportsAggressiveRelease ) {
|
||||
LOG.unsupportedAfterStatement();
|
||||
releaseMode = ConnectionReleaseMode.AFTER_TRANSACTION;
|
||||
}
|
||||
}
|
||||
}
|
||||
settings.setConnectionReleaseMode( releaseMode );
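
The reworked check asks whichever connection-access service is actually in use: the MultiTenantConnectionProvider when the multi-tenancy strategy requires one, otherwise the plain ConnectionProvider. A hedged configuration sketch (property keys as commonly used for these settings; values hypothetical):

	Properties props = new Properties();
	// after_statement is silently downgraded to after_transaction when the
	// responsible provider reports no support for aggressive release
	props.setProperty( "hibernate.connection.release_mode", "after_statement" );
	// with DATABASE (or SCHEMA) multi-tenancy the MultiTenantConnectionProvider,
	// not the ConnectionProvider, is the service consulted above
	props.setProperty( "hibernate.multiTenancy", "DATABASE" );
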
|
||||
|
@ -303,12 +316,6 @@ public class SettingsFactory implements Serializable {
|
|||
}
|
||||
settings.setCheckNullability(checkNullability);
|
||||
|
||||
MultiTenancyStrategy multiTenancyStrategy = MultiTenancyStrategy.determineMultiTenancyStrategy( properties );
|
||||
if ( debugEnabled ) {
|
||||
LOG.debugf( "multi-tenancy strategy : %s", multiTenancyStrategy );
|
||||
}
|
||||
settings.setMultiTenancyStrategy( multiTenancyStrategy );
|
||||
|
||||
// TODO: Does EntityTuplizerFactory really need to be configurable? revisit for HHH-6383
|
||||
settings.setEntityTuplizerFactory( new EntityTuplizerFactory() );
|
||||
|
||||
|
|
|
@ -1036,7 +1036,7 @@ public abstract class CollectionBinder {
|
|||
}
|
||||
else {
|
||||
keyVal = (KeyValue) collValue.getOwner()
|
||||
.getRecursiveProperty( propRef )
|
||||
.getReferencedProperty( propRef )
|
||||
.getValue();
|
||||
}
|
||||
DependantValue key = new DependantValue( mappings, collValue.getCollectionTable(), keyVal );
|
||||
|
@ -1317,6 +1317,8 @@ public abstract class CollectionBinder {
|
|||
}
|
||||
elementBinder.setColumns( elementColumns );
|
||||
elementBinder.setType( property, elementClass );
|
||||
elementBinder.setPersistentClassName( propertyHolder.getEntityName() );
|
||||
elementBinder.setAccessType( accessType );
|
||||
collValue.setElement( elementBinder.make() );
|
||||
String orderBy = adjustUserSuppliedValueCollectionOrderingFragment( hqlOrderBy );
|
||||
if ( orderBy != null ) {
|
||||
|
@ -1467,4 +1469,4 @@ public abstract class CollectionBinder {
|
|||
public void setLocalGenerators(HashMap<String, IdGenerator> localGenerators) {
|
||||
this.localGenerators = localGenerators;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -210,26 +210,25 @@ public class MapBinder extends CollectionBinder {
|
|||
}
|
||||
}
|
||||
|
||||
PersistentClass owner = mapValue.getOwner();
|
||||
AccessType accessType;
|
||||
// FIXME support @Access for collection of elements
|
||||
// String accessType = access != null ? access.value() : null;
|
||||
if ( owner.getIdentifierProperty() != null ) {
|
||||
accessType = owner.getIdentifierProperty().getPropertyAccessorName().equals( "property" ) ? AccessType.PROPERTY
|
||||
: AccessType.FIELD;
|
||||
}
|
||||
else if ( owner.getIdentifierMapper() != null && owner.getIdentifierMapper().getPropertySpan() > 0 ) {
|
||||
Property prop = (Property) owner.getIdentifierMapper().getPropertyIterator().next();
|
||||
accessType = prop.getPropertyAccessorName().equals( "property" ) ? AccessType.PROPERTY
|
||||
: AccessType.FIELD;
|
||||
}
|
||||
else {
|
||||
throw new AssertionFailure( "Unable to guess collection property accessor name" );
|
||||
}
|
||||
|
||||
if ( AnnotatedClassType.EMBEDDABLE.equals( classType ) ) {
|
||||
EntityBinder entityBinder = new EntityBinder();
|
||||
PersistentClass owner = mapValue.getOwner();
|
||||
boolean isPropertyAnnotated;
|
||||
//FIXME support @Access for collection of elements
|
||||
//String accessType = access != null ? access.value() : null;
|
||||
if ( owner.getIdentifierProperty() != null ) {
|
||||
isPropertyAnnotated = owner.getIdentifierProperty()
|
||||
.getPropertyAccessorName()
|
||||
.equals( "property" );
|
||||
}
|
||||
else
|
||||
if ( owner.getIdentifierMapper() != null && owner.getIdentifierMapper().getPropertySpan() > 0 ) {
|
||||
Property prop = (Property) owner.getIdentifierMapper().getPropertyIterator().next();
|
||||
isPropertyAnnotated = prop.getPropertyAccessorName().equals( "property" );
|
||||
}
|
||||
else {
|
||||
throw new AssertionFailure( "Unable to guess collection property accessor name" );
|
||||
}
|
||||
|
||||
|
||||
PropertyData inferredData;
|
||||
if ( isHibernateExtensionMapping() ) {
|
||||
|
@ -242,7 +241,7 @@ public class MapBinder extends CollectionBinder {
|
|||
|
||||
//TODO be smart with isNullable
|
||||
Component component = AnnotationBinder.fillComponent(
|
||||
holder, inferredData, isPropertyAnnotated ? AccessType.PROPERTY : AccessType.FIELD, true,
|
||||
holder, inferredData, accessType, true,
|
||||
entityBinder, false, false,
|
||||
true, mappings, inheritanceStatePerClass
|
||||
);
|
||||
|
@ -285,6 +284,8 @@ public class MapBinder extends CollectionBinder {
|
|||
else {
|
||||
elementBinder.setType( property, elementClass );
|
||||
}
|
||||
elementBinder.setPersistentClassName( propertyHolder.getEntityName() );
|
||||
elementBinder.setAccessType( accessType );
|
||||
mapValue.setIndex( elementBinder.make() );
|
||||
}
|
||||
}
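
The rewritten block resolves an AccessType enum (PROPERTY or FIELD) instead of the old isPropertyAnnotated boolean, inferring it from how the owning entity's identifier is mapped; embeddable map values are then bound with that same access. A hedged JPA sketch (two separate classes, names hypothetical, javax.persistence imports assumed):

@Entity
public class Customer {
	@Id @GeneratedValue          // id mapped on the field ...
	private Long id;

	@ElementCollection           // ... so the Address values are bound with AccessType.FIELD
	private Map<String, Address> addresses = new HashMap<String, Address>();
}

@Embeddable
public class Address {
	private String street;
	private String city;
}
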
|
||||
|
|
|
@ -188,6 +188,7 @@ public class PropertyBinder {
|
|||
simpleValueBinder.setType( property, returnedClass );
|
||||
simpleValueBinder.setMappings( mappings );
|
||||
simpleValueBinder.setReferencedEntityName( referencedEntityName );
|
||||
simpleValueBinder.setAccessType( accessType );
|
||||
SimpleValue propertyValue = simpleValueBinder.make();
|
||||
setValue( propertyValue );
|
||||
return makeProperty();
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
package org.hibernate.cfg.annotations;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.lang.reflect.TypeVariable;
|
||||
import java.sql.Types;
|
||||
import java.util.Calendar;
|
||||
|
@ -40,15 +41,15 @@ import javax.persistence.MapKeyTemporal;
|
|||
import javax.persistence.Temporal;
|
||||
import javax.persistence.TemporalType;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.hibernate.AnnotationException;
|
||||
import org.hibernate.AssertionFailure;
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.annotations.Parameter;
|
||||
import org.hibernate.annotations.Type;
|
||||
import org.hibernate.annotations.common.reflection.XClass;
|
||||
import org.hibernate.annotations.common.reflection.XProperty;
|
||||
import org.hibernate.annotations.common.util.ReflectHelper;
|
||||
import org.hibernate.cfg.AccessType;
|
||||
import org.hibernate.cfg.AttributeConverterDefinition;
|
||||
import org.hibernate.cfg.BinderHelper;
|
||||
import org.hibernate.cfg.Ejb3Column;
|
||||
|
@ -67,18 +68,25 @@ import org.hibernate.type.PrimitiveCharacterArrayClobType;
|
|||
import org.hibernate.type.SerializableToBlobType;
|
||||
import org.hibernate.type.StandardBasicTypes;
|
||||
import org.hibernate.type.WrappedMaterializedBlobType;
|
||||
import org.hibernate.usertype.DynamicParameterizedType;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
/**
|
||||
* @author Emmanuel Bernard
|
||||
*/
|
||||
public class SimpleValueBinder {
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, SimpleValueBinder.class.getName());
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
|
||||
CoreMessageLogger.class,
|
||||
SimpleValueBinder.class.getName()
|
||||
);
|
||||
|
||||
private String propertyName;
|
||||
private String returnedClassName;
|
||||
private Ejb3Column[] columns;
|
||||
private String persistentClassName;
|
||||
private String explicitType = "";
|
||||
private String defaultType = "";
|
||||
private Properties typeParameters = new Properties();
|
||||
private Mappings mappings;
|
||||
private Table table;
|
||||
|
@ -88,6 +96,8 @@ public class SimpleValueBinder {
|
|||
//is a Map key
|
||||
private boolean key;
|
||||
private String referencedEntityName;
|
||||
private XProperty xproperty;
|
||||
private AccessType accessType;
|
||||
|
||||
private AttributeConverterDefinition attributeConverterDefinition;
|
||||
|
||||
|
@ -113,6 +123,10 @@ public class SimpleValueBinder {
|
|||
|
||||
public void setReturnedClassName(String returnedClassName) {
|
||||
this.returnedClassName = returnedClassName;
|
||||
|
||||
if ( defaultType.length() == 0 ) {
|
||||
defaultType = returnedClassName;
|
||||
}
|
||||
}
|
||||
|
||||
public void setTable(Table table) {
|
||||
|
@ -140,10 +154,17 @@ public class SimpleValueBinder {
|
|||
returnedClassOrElement = property.getElementClass();
|
||||
isArray = true;
|
||||
}
|
||||
this.xproperty = property;
|
||||
Properties typeParameters = this.typeParameters;
|
||||
typeParameters.clear();
|
||||
String type = BinderHelper.ANNOTATION_STRING_DEFAULT;
|
||||
if ( ( !key && property.isAnnotationPresent( Temporal.class ) )
|
||||
|
||||
Type annType = property.getAnnotation( Type.class );
|
||||
if ( annType != null ) {
|
||||
setExplicitType( annType );
|
||||
type = explicitType;
|
||||
}
|
||||
else if ( ( !key && property.isAnnotationPresent( Temporal.class ) )
|
||||
|| ( key && property.isAnnotationPresent( MapKeyTemporal.class ) ) ) {
|
||||
|
||||
boolean isDate;
|
||||
|
@ -179,6 +200,7 @@ public class SimpleValueBinder {
|
|||
default:
|
||||
throw new AssertionFailure( "Unknown temporal type: " + temporalType );
|
||||
}
|
||||
explicitType = type;
|
||||
}
|
||||
else if ( property.isAnnotationPresent( Lob.class ) ) {
|
||||
if ( mappings.getReflectionManager().equals( returnedClassOrElement, java.sql.Clob.class ) ) {
|
||||
|
@ -215,58 +237,43 @@ public class SimpleValueBinder {
|
|||
else {
|
||||
type = "blob";
|
||||
}
|
||||
explicitType = type;
|
||||
}
|
||||
//implicit type will check basic types and Serializable classes
|
||||
else if ( ( !key && property.isAnnotationPresent( Enumerated.class ) )
|
||||
|| ( key && property.isAnnotationPresent( MapKeyEnumerated.class ) ) ) {
|
||||
type = EnumType.class.getName();
|
||||
explicitType = type;
|
||||
}
|
||||
|
||||
// implicit type will check basic types and Serializable classes
|
||||
if ( columns == null ) {
|
||||
throw new AssertionFailure( "SimpleValueBinder.setColumns should be set before SimpleValueBinder.setType" );
|
||||
}
|
||||
|
||||
if ( BinderHelper.ANNOTATION_STRING_DEFAULT.equals( type ) ) {
|
||||
if ( returnedClassOrElement.isEnum() ) {
|
||||
type = EnumType.class.getName();
|
||||
typeParameters = new Properties();
|
||||
typeParameters.setProperty( EnumType.ENUM, returnedClassOrElement.getName() );
|
||||
String schema = columns[0].getTable().getSchema();
|
||||
schema = schema == null ? "" : schema;
|
||||
String catalog = columns[0].getTable().getCatalog();
|
||||
catalog = catalog == null ? "" : catalog;
|
||||
typeParameters.setProperty( EnumType.SCHEMA, schema );
|
||||
typeParameters.setProperty( EnumType.CATALOG, catalog );
|
||||
typeParameters.setProperty( EnumType.TABLE, columns[0].getTable().getName() );
|
||||
typeParameters.setProperty( EnumType.COLUMN, columns[0].getName() );
|
||||
javax.persistence.EnumType enumType = getEnumType( property );
|
||||
if ( enumType != null ) {
|
||||
if ( javax.persistence.EnumType.ORDINAL.equals( enumType ) ) {
|
||||
typeParameters.setProperty( EnumType.TYPE, String.valueOf( Types.INTEGER ) );
|
||||
}
|
||||
else if ( javax.persistence.EnumType.STRING.equals( enumType ) ) {
|
||||
typeParameters.setProperty( EnumType.TYPE, String.valueOf( Types.VARCHAR ) );
|
||||
}
|
||||
else {
|
||||
throw new AssertionFailure( "Unknown EnumType: " + enumType );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
explicitType = type;
|
||||
|
||||
defaultType = BinderHelper.isEmptyAnnotationValue( type ) ? returnedClassName : type;
|
||||
this.typeParameters = typeParameters;
|
||||
Type annType = property.getAnnotation( Type.class );
|
||||
setExplicitType( annType );
|
||||
|
||||
applyAttributeConverter( property );
|
||||
}
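
When no explicit @Type is given and the attribute is an enum, the binder falls back to org.hibernate.type.EnumType and records the enum class plus the schema, catalog, table and column of the first mapped column as type parameters; @Enumerated then selects the JDBC type. A hedged JPA example (entity and enum hypothetical, javax.persistence imports assumed):

@Entity
public class Invoice {
	@Id @GeneratedValue
	private Long id;

	// STRING records Types.VARCHAR, ORDINAL records Types.INTEGER in the
	// EnumType.TYPE parameter; with no @Enumerated the parameter is left unset
	@Enumerated(javax.persistence.EnumType.STRING)
	private Status status;

	public enum Status { OPEN, PAID, VOID }
}
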
|
||||
|
||||
private void applyAttributeConverter(XProperty property) {
|
||||
final boolean canBeConverted = ! property.isAnnotationPresent( Id.class )
|
||||
&& ! isVersion
|
||||
&& ! isAssociation()
|
||||
&& ! property.isAnnotationPresent( Temporal.class )
|
||||
&& ! property.isAnnotationPresent( Enumerated.class );
|
||||
final boolean canBeConverted = !property.isAnnotationPresent( Id.class )
|
||||
&& !isVersion
|
||||
&& !isAssociation()
|
||||
&& !property.isAnnotationPresent( Temporal.class )
|
||||
&& !property.isAnnotationPresent( Enumerated.class );
|
||||
|
||||
if ( canBeConverted ) {
|
||||
// @Convert annotations take precedence
|
||||
final Convert convertAnnotation = locateConvertAnnotation( property );
|
||||
if ( convertAnnotation != null ) {
|
||||
if ( ! convertAnnotation.disableConversion() ) {
|
||||
if ( !convertAnnotation.disableConversion() ) {
|
||||
attributeConverterDefinition = mappings.locateAttributeConverter( convertAnnotation.converter() );
|
||||
}
|
||||
}
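
Conversion is never auto-applied to ids, versions, associations or @Temporal/@Enumerated attributes; otherwise a @Convert on the property (with disableConversion left at its false default) wins and its converter class is resolved through the mappings. A hedged sketch against the draft JPA 2.1 contract used here (converter and attribute are hypothetical):

@Converter
public class YesNoConverter implements AttributeConverter<Boolean, String> {
	@Override
	public String convertToDatabaseColumn(Boolean attribute) {
		return Boolean.TRUE.equals( attribute ) ? "Y" : "N";
	}

	@Override
	public Boolean convertToEntityAttribute(String dbData) {
		return "Y".equals( dbData );
	}
}

// on the owning entity:
@Convert( converter = YesNoConverter.class )
private Boolean active;
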
|
||||
|
@ -308,10 +315,12 @@ public class SimpleValueBinder {
|
|||
final XClass owner;
|
||||
try {
|
||||
final Class ownerClass = ReflectHelper.classForName( persistentClassName );
|
||||
owner = mappings.getReflectionManager().classForName( persistentClassName, ownerClass );
|
||||
owner = mappings.getReflectionManager().classForName( persistentClassName, ownerClass );
|
||||
}
|
||||
catch (ClassNotFoundException e) {
|
||||
throw new AnnotationException( "Unable to resolve Class reference during attempt to locate @Convert annotations" );
|
||||
catch ( ClassNotFoundException e ) {
|
||||
throw new AnnotationException(
|
||||
"Unable to resolve Class reference during attempt to locate @Convert annotations"
|
||||
);
|
||||
}
|
||||
|
||||
return lookForEntityDefinedConvertAnnotation( property, owner );
|
||||
|
@ -416,7 +425,7 @@ public class SimpleValueBinder {
|
|||
}
|
||||
|
||||
private static Class getWrapperEquivalent(Class primitive) {
|
||||
if ( ! primitive.isPrimitive() ) {
|
||||
if ( !primitive.isPrimitive() ) {
|
||||
throw new AssertionFailure( "Passed type for which to locate wrapper equivalent was not a primitive" );
|
||||
}
|
||||
|
||||
|
@ -448,23 +457,6 @@ public class SimpleValueBinder {
|
|||
throw new AssertionFailure( "Unexpected primitive type (VOID most likely) passed to getWrapperEquivalent" );
|
||||
}
|
||||
|
||||
private javax.persistence.EnumType getEnumType(XProperty property) {
|
||||
javax.persistence.EnumType enumType = null;
|
||||
if ( key ) {
|
||||
MapKeyEnumerated enumAnn = property.getAnnotation( MapKeyEnumerated.class );
|
||||
if ( enumAnn != null ) {
|
||||
enumType = enumAnn.value();
|
||||
}
|
||||
}
|
||||
else {
|
||||
Enumerated enumAnn = property.getAnnotation( Enumerated.class );
|
||||
if ( enumAnn != null ) {
|
||||
enumType = enumAnn.value();
|
||||
}
|
||||
}
|
||||
return enumType;
|
||||
}
|
||||
|
||||
private TemporalType getTemporalType(XProperty property) {
|
||||
if ( key ) {
|
||||
MapKeyTemporal ann = property.getAnnotation( MapKeyTemporal.class );
|
||||
|
@ -529,7 +521,7 @@ public class SimpleValueBinder {
|
|||
if ( columns[0].isNameDeferred() && !mappings.isInSecondPass() && referencedEntityName != null ) {
|
||||
mappings.addSecondPass(
|
||||
new PkDrivenByDefaultMapsIdSecondPass(
|
||||
referencedEntityName, ( Ejb3JoinColumn[] ) columns, simpleValue
|
||||
referencedEntityName, (Ejb3JoinColumn[]) columns, simpleValue
|
||||
)
|
||||
);
|
||||
}
|
||||
|
@ -544,7 +536,7 @@ public class SimpleValueBinder {
|
|||
LOG.debugf( "Setting SimpleValue typeName for %s", propertyName );
|
||||
|
||||
if ( attributeConverterDefinition != null ) {
|
||||
if ( ! BinderHelper.isEmptyAnnotationValue( explicitType ) ) {
|
||||
if ( !BinderHelper.isEmptyAnnotationValue( explicitType ) ) {
|
||||
throw new AnnotationException(
|
||||
String.format(
|
||||
"AttributeConverter and explicit Type cannot be applied to same attribute [%s.%s];" +
|
||||
|
@ -557,8 +549,26 @@ public class SimpleValueBinder {
|
|||
simpleValue.setJpaAttributeConverterDefinition( attributeConverterDefinition );
|
||||
}
|
||||
else {
|
||||
String type = BinderHelper.isEmptyAnnotationValue( explicitType ) ? returnedClassName : explicitType;
|
||||
org.hibernate.mapping.TypeDef typeDef = mappings.getTypeDef( type );
|
||||
String type;
|
||||
org.hibernate.mapping.TypeDef typeDef;
|
||||
|
||||
if ( !BinderHelper.isEmptyAnnotationValue( explicitType ) ) {
|
||||
type = explicitType;
|
||||
typeDef = mappings.getTypeDef( type );
|
||||
}
|
||||
else {
|
||||
// try implicit type
|
||||
org.hibernate.mapping.TypeDef implicitTypeDef = mappings.getTypeDef( returnedClassName );
|
||||
if ( implicitTypeDef != null ) {
|
||||
typeDef = implicitTypeDef;
|
||||
type = returnedClassName;
|
||||
}
|
||||
else {
|
||||
typeDef = mappings.getTypeDef( defaultType );
|
||||
type = defaultType;
|
||||
}
|
||||
}
|
||||
|
||||
if ( typeDef != null ) {
|
||||
type = typeDef.getTypeClass();
|
||||
simpleValue.setTypeParameters( typeDef.getParameters() );
|
||||
|
@ -582,10 +592,43 @@ public class SimpleValueBinder {
|
|||
if ( timeStampVersionType != null ) {
|
||||
simpleValue.setTypeName( timeStampVersionType );
|
||||
}
|
||||
|
||||
if ( simpleValue.getTypeName() != null && simpleValue.getTypeName().length() > 0
|
||||
&& simpleValue.getMappings().getTypeResolver().basic( simpleValue.getTypeName() ) == null ) {
|
||||
try {
|
||||
Class typeClass = ReflectHelper.classForName( simpleValue.getTypeName() );
|
||||
|
||||
if ( typeClass != null && DynamicParameterizedType.class.isAssignableFrom( typeClass ) ) {
|
||||
Properties parameters = simpleValue.getTypeParameters();
|
||||
if ( parameters == null ) {
|
||||
parameters = new Properties();
|
||||
}
|
||||
parameters.put( DynamicParameterizedType.IS_DYNAMIC, Boolean.toString( true ) );
|
||||
parameters.put( DynamicParameterizedType.RETURNED_CLASS, returnedClassName );
|
||||
parameters.put( DynamicParameterizedType.IS_PRIMARY_KEY, Boolean.toString( key ) );
|
||||
|
||||
parameters.put( DynamicParameterizedType.ENTITY, persistentClassName );
|
||||
parameters.put( DynamicParameterizedType.PROPERTY, xproperty.getName() );
|
||||
parameters.put( DynamicParameterizedType.ACCESS_TYPE, accessType.getType() );
|
||||
simpleValue.setTypeParameters( parameters );
|
||||
}
|
||||
}
|
||||
catch ( ClassNotFoundException cnfe ) {
|
||||
throw new MappingException( "Could not determine type for: " + simpleValue.getTypeName(), cnfe );
|
||||
}
|
||||
}
|
||||
|
||||
}
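
When the resolved type name points at a class implementing DynamicParameterizedType, the binder now also passes the entity name, property name, access type, returned class and primary-key flag as type parameters. A custom type can pick these up in setParameterValues(); a hedged sketch (the type class is hypothetical, the usual UserType methods are elided):

public class AuditedStringType implements UserType, DynamicParameterizedType {
	private String entity;
	private String property;

	@Override
	public void setParameterValues(Properties parameters) {
		// keys and values as populated by the binder above
		entity = parameters.getProperty( DynamicParameterizedType.ENTITY );
		property = parameters.getProperty( DynamicParameterizedType.PROPERTY );
	}

	// ... sqlTypes(), returnedClass(), nullSafeGet(), nullSafeSet(), etc. elided ...
}
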
|
||||
|
||||
public void setKey(boolean key) {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
}
|
||||
public AccessType getAccessType() {
|
||||
return accessType;
|
||||
}
|
||||
|
||||
public void setAccessType(AccessType accessType) {
|
||||
this.accessType = accessType;
|
||||
}
|
||||
}
|
|
@ -402,7 +402,7 @@ public class TableBinder {
|
|||
"No property ref found while expected"
|
||||
);
|
||||
}
|
||||
Property synthProp = referencedEntity.getRecursiveProperty( referencedPropertyName );
|
||||
Property synthProp = referencedEntity.getReferencedProperty( referencedPropertyName );
|
||||
if ( synthProp == null ) {
|
||||
throw new AssertionFailure(
|
||||
"Cannot find synthProp: " + referencedEntity.getEntityName() + "." + referencedPropertyName
|
||||
|
|
File diff suppressed because it is too large
|
@ -29,7 +29,7 @@ import org.hibernate.EntityMode;
|
|||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.PropertyValueException;
|
||||
import org.hibernate.bytecode.instrumentation.spi.LazyPropertyInitializer;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
import org.hibernate.type.CollectionType;
|
||||
|
@ -151,7 +151,7 @@ public final class Nullability {
|
|||
//check for all components values in the collection
|
||||
|
||||
CompositeType componentType = (CompositeType) collectionElementType;
|
||||
Iterator iter = CascadingAction.getLoadedElementsIterator( session, collectionType, value );
|
||||
Iterator iter = CascadingActions.getLoadedElementsIterator( session, collectionType, value );
|
||||
while ( iter.hasNext() ) {
|
||||
Object compValue = iter.next();
|
||||
if (compValue != null) {
|
||||
|
|
|
@ -76,7 +76,9 @@ import org.hibernate.persister.entity.EntityPersister;
|
|||
import org.hibernate.pretty.MessageHelper;
|
||||
import org.hibernate.proxy.HibernateProxy;
|
||||
import org.hibernate.proxy.LazyInitializer;
|
||||
import org.hibernate.sql.Select;
|
||||
import org.hibernate.tuple.ElementWrapper;
|
||||
import org.hibernate.type.CollectionType;
|
||||
|
||||
/**
|
||||
* A <strong>stateful</strong> implementation of the {@link PersistenceContext} contract meaning that we maintain this
|
||||
|
@ -775,6 +777,64 @@ public class StatefulPersistenceContext implements PersistenceContext {
|
|||
*/
|
||||
@Override
|
||||
public Object getCollectionOwner(Serializable key, CollectionPersister collectionPersister) throws MappingException {
|
||||
// todo : we really just need to add a split in the notions of:
|
||||
// 1) collection key
|
||||
// 2) collection owner key
|
||||
// these 2 are not always the same. Same is true in the case of ToOne associations with property-ref...
|
||||
final EntityPersister ownerPersister = collectionPersister.getOwnerEntityPersister();
|
||||
if ( ownerPersister.getIdentifierType().getReturnedClass().isInstance( key ) ) {
|
||||
return getEntity( session.generateEntityKey( key, collectionPersister.getOwnerEntityPersister() ) );
|
||||
}
|
||||
|
||||
// we have a property-ref type mapping for the collection key. But that could show up a few ways here...
|
||||
//
|
||||
// 1) The incoming key could be the entity itself...
|
||||
if ( ownerPersister.isInstance( key ) ) {
|
||||
final Serializable ownerId = ownerPersister.getIdentifier( key, session );
|
||||
if ( ownerId == null ) {
|
||||
return null;
|
||||
}
|
||||
return getEntity( session.generateEntityKey( ownerId, ownerPersister ) );
|
||||
}
|
||||
|
||||
final CollectionType collectionType = collectionPersister.getCollectionType();
|
||||
|
||||
// 2) The incoming key is most likely the collection key which we need to resolve to the owner key
|
||||
// find the corresponding owner instance
|
||||
// a) try by EntityUniqueKey
|
||||
if ( collectionType.getLHSPropertyName() != null ) {
|
||||
Object owner = getEntity(
|
||||
new EntityUniqueKey(
|
||||
ownerPersister.getEntityName(),
|
||||
collectionType.getLHSPropertyName(),
|
||||
key,
|
||||
collectionPersister.getKeyType(),
|
||||
ownerPersister.getEntityMode(),
|
||||
session.getFactory()
|
||||
)
|
||||
);
|
||||
if ( owner != null ) {
|
||||
return owner;
|
||||
}
|
||||
|
||||
// b) try by EntityKey, which means we need to resolve owner-key -> collection-key
|
||||
// IMPL NOTE : yes if we get here this impl is very non-performant, but PersistenceContext
|
||||
// was never designed to handle this case; adding that capability for real means splitting
|
||||
// the notions of:
|
||||
// 1) collection key
|
||||
// 2) collection owner key
|
||||
// these 2 are not always the same (same is true in the case of ToOne associations with
|
||||
// property-ref). That would require changes to (at least) CollectionEntry and quite
|
||||
// probably changes to how the sql for collection initializers are generated
|
||||
//
|
||||
// We could also possibly see if the referenced property is a natural id since we already have caching
|
||||
// in place of natural id snapshots. But really it's better to just do it the right way if we start
|
||||
// going that route
|
||||
final Serializable ownerId = ownerPersister.getIdByUniqueKey( key, collectionType.getLHSPropertyName(), session );
|
||||
return getEntity( session.generateEntityKey( ownerId, ownerPersister ) );
|
||||
}
|
||||
|
||||
// as a last resort this is what the old code did...
|
||||
return getEntity( session.generateEntityKey( key, collectionPersister.getOwnerEntityPersister() ) );
|
||||
}
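
The extra branches matter when the collection key is a property-ref rather than the owner's identifier: the incoming key may already be the owner instance, or it may be the referenced unique-key value that has to be resolved back to an owner id (via EntityUniqueKey or getIdByUniqueKey). A hedged mapping sketch of a collection keyed on a non-identifier column (names hypothetical, an Order entity assumed, javax.persistence imports assumed):

@Entity
public class Customer {
	@Id @GeneratedValue
	private Long id;

	@Column(unique = true)
	private String customerNumber;   // the referenced unique key

	// keyed on customerNumber instead of the id, so resolving the collection
	// owner can go through the property-ref path above
	@OneToMany
	@JoinColumn(name = "cust_number", referencedColumnName = "customerNumber")
	private Set<Order> orders = new HashSet<Order>();
}
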
|
||||
|
||||
|
|
|
@ -98,7 +98,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
|
|||
boolean isClosed,
|
||||
List<ConnectionObserver> observers) {
|
||||
this.connectionReleaseMode = determineConnectionReleaseMode(
|
||||
jdbcServices, isUserSuppliedConnection, connectionReleaseMode
|
||||
jdbcConnectionAccess, isUserSuppliedConnection, connectionReleaseMode
|
||||
);
|
||||
this.jdbcServices = jdbcServices;
|
||||
this.jdbcConnectionAccess = jdbcConnectionAccess;
|
||||
|
@ -110,14 +110,14 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
|
|||
}
|
||||
|
||||
private static ConnectionReleaseMode determineConnectionReleaseMode(
|
||||
JdbcServices jdbcServices,
|
||||
JdbcConnectionAccess jdbcConnectionAccess,
|
||||
boolean isUserSuppliedConnection,
|
||||
ConnectionReleaseMode connectionReleaseMode) {
|
||||
if ( isUserSuppliedConnection ) {
|
||||
return ConnectionReleaseMode.ON_CLOSE;
|
||||
}
|
||||
else if ( connectionReleaseMode == ConnectionReleaseMode.AFTER_STATEMENT &&
|
||||
! jdbcServices.getConnectionProvider().supportsAggressiveRelease() ) {
|
||||
! jdbcConnectionAccess.supportsAggressiveRelease() ) {
|
||||
LOG.debug( "Connection provider reports to not support aggressive release; overriding" );
|
||||
return ConnectionReleaseMode.AFTER_TRANSACTION;
|
||||
}
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
*/
|
||||
package org.hibernate.engine.jdbc.spi;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.ResultSet;
|
||||
|
||||
import org.hibernate.dialect.Dialect;
|
||||
|
@ -44,13 +43,15 @@ public interface JdbcServices extends Service {
|
|||
* Obtain service for providing JDBC connections.
|
||||
*
|
||||
* @return The connection provider.
|
||||
*
|
||||
* @deprecated See deprecation notice on {@link org.hibernate.engine.spi.SessionFactoryImplementor#getConnectionProvider()}
|
||||
* for details
|
||||
*/
|
||||
@Deprecated
|
||||
public ConnectionProvider getConnectionProvider();
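
Callers that previously reached the ConnectionProvider through JdbcServices are expected to resolve the service from the registry instead, as the SettingsFactory change earlier in this diff already does; a minimal sketch (serviceRegistry assumed to be in scope):

ConnectionProvider connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
boolean aggressiveRelease = connectionProvider.supportsAggressiveRelease();
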
|
||||
|
||||
/**
|
||||
* Obtain the dialect of the database to which {@link Connection connections} from
|
||||
* {@link #getConnectionProvider()} point.
|
||||
* Obtain the dialect of the database.
|
||||
*
|
||||
* @return The database dialect.
|
||||
*/
|
||||
|
|
|
@ -24,20 +24,16 @@
|
|||
package org.hibernate.engine.spi;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.internal.util.collections.ArrayHelper;
|
||||
|
||||
/**
|
||||
* A contract for defining the aspects of cascading various persistence actions.
|
||||
*
|
||||
* @author Gavin King
|
||||
* @author Steve Ebersole
|
||||
*
|
||||
* @see CascadingAction
|
||||
*/
|
||||
public abstract class CascadeStyle implements Serializable {
|
||||
|
||||
public interface CascadeStyle extends Serializable {
|
||||
/**
|
||||
* For this style, should the given action be cascaded?
|
||||
*
|
||||
|
@ -45,7 +41,7 @@ public abstract class CascadeStyle implements Serializable {
|
|||
*
|
||||
* @return True if the action should be cascaded under this style; false otherwise.
|
||||
*/
|
||||
public abstract boolean doCascade(CascadingAction action);
|
||||
public boolean doCascade(CascadingAction action);
|
||||
|
||||
/**
|
||||
* Probably more aptly named something like doCascadeToCollectionElements(); it is
|
||||
|
@ -61,9 +57,7 @@ public abstract class CascadeStyle implements Serializable {
|
|||
* @return True if the action should be really cascaded under this style;
|
||||
* false otherwise.
|
||||
*/
|
||||
public boolean reallyDoCascade(CascadingAction action) {
|
||||
return doCascade( action );
|
||||
}
|
||||
public boolean reallyDoCascade(CascadingAction action);
|
||||
|
||||
/**
|
||||
* Do we need to delete orphaned collection elements?
|
||||
|
@ -71,252 +65,5 @@ public abstract class CascadeStyle implements Serializable {
|
|||
* @return True if this style need to account for orphan delete
|
||||
* operations; false otherwise.
|
||||
*/
|
||||
public boolean hasOrphanDelete() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public static final class MultipleCascadeStyle extends CascadeStyle {
|
||||
private final CascadeStyle[] styles;
|
||||
|
||||
public MultipleCascadeStyle(CascadeStyle[] styles) {
|
||||
this.styles = styles;
|
||||
}
|
||||
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
for ( CascadeStyle style : styles ) {
|
||||
if ( style.doCascade( action ) ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean reallyDoCascade(CascadingAction action) {
|
||||
for ( CascadeStyle style : styles ) {
|
||||
if ( style.reallyDoCascade( action ) ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean hasOrphanDelete() {
|
||||
for ( CascadeStyle style : styles ) {
|
||||
if ( style.hasOrphanDelete() ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return ArrayHelper.toString( styles );
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* save / delete / update / evict / lock / replicate / merge / persist + delete orphans
|
||||
*/
|
||||
public static final CascadeStyle ALL_DELETE_ORPHAN = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return true;
|
||||
}
|
||||
|
||||
public boolean hasOrphanDelete() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_ALL_DELETE_ORPHAN";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* save / delete / update / evict / lock / replicate / merge / persist
|
||||
*/
|
||||
public static final CascadeStyle ALL = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return true;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_ALL";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* save / update
|
||||
*/
|
||||
public static final CascadeStyle UPDATE = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.SAVE_UPDATE;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_SAVE_UPDATE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* lock
|
||||
*/
|
||||
public static final CascadeStyle LOCK = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.LOCK;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_LOCK";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* refresh
|
||||
*/
|
||||
public static final CascadeStyle REFRESH = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.REFRESH;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_REFRESH";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* evict
|
||||
*/
|
||||
public static final CascadeStyle EVICT = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.EVICT;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_EVICT";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* replicate
|
||||
*/
|
||||
public static final CascadeStyle REPLICATE = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.REPLICATE;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_REPLICATE";
|
||||
}
|
||||
};
|
||||
/**
|
||||
* merge
|
||||
*/
|
||||
public static final CascadeStyle MERGE = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.MERGE;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_MERGE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* create
|
||||
*/
|
||||
public static final CascadeStyle PERSIST = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.PERSIST
|
||||
|| action == CascadingAction.PERSIST_ON_FLUSH;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_PERSIST";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* delete
|
||||
*/
|
||||
public static final CascadeStyle DELETE = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.DELETE;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_DELETE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* delete + delete orphans
|
||||
*/
|
||||
public static final CascadeStyle DELETE_ORPHAN = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingAction.DELETE || action == CascadingAction.SAVE_UPDATE;
|
||||
}
|
||||
|
||||
public boolean reallyDoCascade(CascadingAction action) {
|
||||
return action == CascadingAction.DELETE;
|
||||
}
|
||||
|
||||
public boolean hasOrphanDelete() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_DELETE_ORPHAN";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* no cascades
|
||||
*/
|
||||
public static final CascadeStyle NONE = new CascadeStyle() {
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return false;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "STYLE_NONE";
|
||||
}
|
||||
};
|
||||
|
||||
public CascadeStyle() {
|
||||
}
|
||||
|
||||
static final Map<String, CascadeStyle> STYLES = new HashMap<String, CascadeStyle>();
|
||||
|
||||
static {
|
||||
STYLES.put( "all", ALL );
|
||||
STYLES.put( "all-delete-orphan", ALL_DELETE_ORPHAN );
|
||||
STYLES.put( "save-update", UPDATE );
|
||||
STYLES.put( "persist", PERSIST );
|
||||
STYLES.put( "merge", MERGE );
|
||||
STYLES.put( "lock", LOCK );
|
||||
STYLES.put( "refresh", REFRESH );
|
||||
STYLES.put( "replicate", REPLICATE );
|
||||
STYLES.put( "evict", EVICT );
|
||||
STYLES.put( "delete", DELETE );
|
||||
STYLES.put( "remove", DELETE ); // adds remove as a sort-of alias for delete...
|
||||
STYLES.put( "delete-orphan", DELETE_ORPHAN );
|
||||
STYLES.put( "none", NONE );
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method for obtaining named cascade styles
|
||||
*
|
||||
* @param cascade The named cascade style name.
|
||||
*
|
||||
* @return The appropriate CascadeStyle
|
||||
*/
|
||||
public static CascadeStyle getCascadeStyle(String cascade) {
|
||||
CascadeStyle style = STYLES.get( cascade );
|
||||
if ( style == null ) {
|
||||
throw new MappingException( "Unsupported cascade style: " + cascade );
|
||||
}
|
||||
else {
|
||||
return style;
|
||||
}
|
||||
}
|
||||
public boolean hasOrphanDelete();
|
||||
}
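
With CascadeStyle reduced to an interface, a style is now just an implementation of these three methods (plus Serializable). A hedged sketch of a standalone style that only cascades merges (CascadingActions is the companion class introduced later in this change):

public class MergeOnlyCascadeStyle implements CascadeStyle {
	@Override
	public boolean doCascade(CascadingAction action) {
		return action == CascadingActions.MERGE;
	}

	@Override
	public boolean reallyDoCascade(CascadingAction action) {
		return doCascade( action );
	}

	@Override
	public boolean hasOrphanDelete() {
		return false;
	}
}

In practice, extending CascadeStyles.BaseCascadeStyle (added below) spares the two boilerplate overrides.
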
|
||||
|
|
|
@ -0,0 +1,348 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
package org.hibernate.engine.spi;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.internal.util.collections.ArrayHelper;
|
||||
|
||||
/**
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public class CascadeStyles {
|
||||
private static final Logger log = Logger.getLogger( CascadeStyles.class );
|
||||
|
||||
/**
|
||||
* Disallow instantiation
|
||||
*/
|
||||
private CascadeStyles() {
|
||||
}
|
||||
|
||||
/**
|
||||
* save / delete / update / evict / lock / replicate / merge / persist + delete orphans
|
||||
*/
|
||||
public static final CascadeStyle ALL_DELETE_ORPHAN = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasOrphanDelete() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_ALL_DELETE_ORPHAN";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* save / delete / update / evict / lock / replicate / merge / persist
|
||||
*/
|
||||
public static final CascadeStyle ALL = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_ALL";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* save / update
|
||||
*/
|
||||
public static final CascadeStyle UPDATE = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.SAVE_UPDATE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_SAVE_UPDATE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* lock
|
||||
*/
|
||||
public static final CascadeStyle LOCK = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.LOCK;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_LOCK";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* refresh
|
||||
*/
|
||||
public static final CascadeStyle REFRESH = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.REFRESH;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_REFRESH";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* evict
|
||||
*/
|
||||
public static final CascadeStyle EVICT = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.EVICT;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_EVICT";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* replicate
|
||||
*/
|
||||
public static final CascadeStyle REPLICATE = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.REPLICATE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_REPLICATE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* merge
|
||||
*/
|
||||
public static final CascadeStyle MERGE = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.MERGE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_MERGE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* create
|
||||
*/
|
||||
public static final CascadeStyle PERSIST = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.PERSIST
|
||||
|| action == CascadingActions.PERSIST_ON_FLUSH;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_PERSIST";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* delete
|
||||
*/
|
||||
public static final CascadeStyle DELETE = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.DELETE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_DELETE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* delete + delete orphans
|
||||
*/
|
||||
public static final CascadeStyle DELETE_ORPHAN = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return action == CascadingActions.DELETE || action == CascadingActions.SAVE_UPDATE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean reallyDoCascade(CascadingAction action) {
|
||||
return action == CascadingActions.DELETE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasOrphanDelete() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_DELETE_ORPHAN";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* no cascades
|
||||
*/
|
||||
public static final CascadeStyle NONE = new BaseCascadeStyle() {
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "STYLE_NONE";
|
||||
}
|
||||
};
|
||||
|
||||
private static final Map<String, CascadeStyle> STYLES = buildBaseCascadeStyleMap();
|
||||
|
||||
private static Map<String, CascadeStyle> buildBaseCascadeStyleMap() {
|
||||
final HashMap<String, CascadeStyle> base = new HashMap<String, CascadeStyle>();
|
||||
|
||||
base.put( "all", ALL );
|
||||
base.put( "all-delete-orphan", ALL_DELETE_ORPHAN );
|
||||
base.put( "save-update", UPDATE );
|
||||
base.put( "persist", PERSIST );
|
||||
base.put( "merge", MERGE );
|
||||
base.put( "lock", LOCK );
|
||||
base.put( "refresh", REFRESH );
|
||||
base.put( "replicate", REPLICATE );
|
||||
base.put( "evict", EVICT );
|
||||
base.put( "delete", DELETE );
|
||||
base.put( "remove", DELETE ); // adds remove as a sort-of alias for delete...
|
||||
base.put( "delete-orphan", DELETE_ORPHAN );
|
||||
base.put( "none", NONE );
|
||||
|
||||
return base;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method for obtaining named cascade styles
|
||||
*
|
||||
* @param cascade The named cascade style name.
|
||||
*
|
||||
* @return The appropriate CascadeStyle
|
||||
*/
|
||||
public static CascadeStyle getCascadeStyle(String cascade) {
|
||||
CascadeStyle style = STYLES.get( cascade );
|
||||
if ( style == null ) {
|
||||
throw new MappingException( "Unsupported cascade style: " + cascade );
|
||||
}
|
||||
else {
|
||||
return style;
|
||||
}
|
||||
}
|
||||
|
||||
public static void registerCascadeStyle(String name, BaseCascadeStyle cascadeStyle) {
|
||||
log.tracef( "Registering external cascade style [%s : %s]", name, cascadeStyle );
|
||||
final CascadeStyle old = STYLES.put( name, cascadeStyle );
|
||||
if ( old != null ) {
|
||||
log.debugf(
|
||||
"External cascade style regsitration [%s : %s] overrode base registration [%s]",
|
||||
name,
|
||||
cascadeStyle,
|
||||
old
|
||||
);
|
||||
}
|
||||
}
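
The registry also lets integrators plug in additional styles by name before mappings are processed; a hedged usage sketch (style name and behaviour hypothetical):

CascadeStyles.registerCascadeStyle( "refresh-only", new CascadeStyles.BaseCascadeStyle() {
	@Override
	public boolean doCascade(CascadingAction action) {
		return action == CascadingActions.REFRESH;
	}

	@Override
	public String toString() {
		return "STYLE_REFRESH_ONLY";
	}
} );
// afterwards resolvable via CascadeStyles.getCascadeStyle( "refresh-only" )
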
|
||||
|
||||
public static abstract class BaseCascadeStyle implements CascadeStyle {
|
||||
@Override
|
||||
public boolean reallyDoCascade(CascadingAction action) {
|
||||
return doCascade( action );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasOrphanDelete() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static final class MultipleCascadeStyle extends BaseCascadeStyle {
|
||||
private final CascadeStyle[] styles;
|
||||
|
||||
public MultipleCascadeStyle(CascadeStyle[] styles) {
|
||||
this.styles = styles;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean doCascade(CascadingAction action) {
|
||||
for ( CascadeStyle style : styles ) {
|
||||
if ( style.doCascade( action ) ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean reallyDoCascade(CascadingAction action) {
|
||||
for ( CascadeStyle style : styles ) {
|
||||
if ( style.reallyDoCascade( action ) ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasOrphanDelete() {
|
||||
for ( CascadeStyle style : styles ) {
|
||||
if ( style.hasOrphanDelete() ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return ArrayHelper.toString( styles );
|
||||
}
|
||||
}
|
||||
}
|
|
@ -24,40 +24,19 @@
|
|||
package org.hibernate.engine.spi;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.LockMode;
|
||||
import org.hibernate.LockOptions;
|
||||
import org.hibernate.ReplicationMode;
|
||||
import org.hibernate.TransientPropertyValueException;
|
||||
import org.hibernate.collection.spi.PersistentCollection;
|
||||
import org.hibernate.engine.internal.ForeignKeys;
|
||||
import org.hibernate.event.spi.EventSource;
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
import org.hibernate.proxy.HibernateProxy;
|
||||
import org.hibernate.type.CollectionType;
|
||||
import org.hibernate.type.EntityType;
|
||||
import org.hibernate.type.Type;
|
||||
|
||||
/**
|
||||
* A session action that may be cascaded from parent entity to its children
|
||||
*
|
||||
* @author Gavin King
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public abstract class CascadingAction {
|
||||
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, CascadingAction.class.getName());
|
||||
|
||||
|
||||
// the CascadingAction contract ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
public CascadingAction() {
|
||||
}
|
||||
public interface CascadingAction {
|
||||
|
||||
/**
|
||||
* Cascade the action to the child object.
|
||||
|
@ -70,7 +49,7 @@ public abstract class CascadingAction {
|
|||
* @param isCascadeDeleteEnabled Are cascading deletes enabled.
|
||||
* @throws HibernateException
|
||||
*/
|
||||
public abstract void cascade(
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
|
@ -86,7 +65,7 @@ public abstract class CascadingAction {
|
|||
* @param collection The collection instance.
|
||||
* @return The children iterator.
|
||||
*/
|
||||
public abstract Iterator getCascadableChildrenIterator(
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection);
|
||||
|
@ -96,7 +75,7 @@ public abstract class CascadingAction {
|
|||
*
|
||||
* @return True if this action can lead to deletions of orphans.
|
||||
*/
|
||||
public abstract boolean deleteOrphans();
|
||||
public boolean deleteOrphans();
|
||||
|
||||
|
||||
/**
|
||||
|
@ -104,9 +83,7 @@ public abstract class CascadingAction {
|
|||
*
|
||||
* @return True if this action requires no-cascade verification; false otherwise.
|
||||
*/
|
||||
public boolean requiresNoCascadeChecking() {
|
||||
return false;
|
||||
}
|
||||
public boolean requiresNoCascadeChecking();
|
||||
|
||||
/**
|
||||
* Called (in the case of {@link #requiresNoCascadeChecking} returning true) to validate
|
||||
|
@ -118,357 +95,10 @@ public abstract class CascadingAction {
|
|||
* @param persister The entity persister for the owner
|
||||
* @param propertyIndex The index of the property within the owner.
|
||||
*/
|
||||
public void noCascade(EventSource session, Object child, Object parent, EntityPersister persister, int propertyIndex) {
|
||||
}
|
||||
public void noCascade(EventSource session, Object child, Object parent, EntityPersister persister, int propertyIndex);
|
||||
|
||||
/**
|
||||
* Should this action be performed (or noCascade consulted) in the case of lazy properties.
|
||||
*/
|
||||
public boolean performOnLazyProperty() {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// the CascadingAction implementations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#delete(Object)
|
||||
*/
|
||||
public static final CascadingAction DELETE = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to delete: {0}", entityName );
|
||||
session.delete( entityName, child, isCascadeDeleteEnabled, ( Set ) anything );
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// delete does cascade to uninitialized collections
|
||||
return CascadingAction.getAllElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
// orphans should be deleted during delete
|
||||
return true;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_DELETE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#lock(Object, LockMode)
|
||||
*/
|
||||
public static final CascadingAction LOCK = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to lock: {0}", entityName );
|
||||
LockMode lockMode = LockMode.NONE;
|
||||
LockOptions lr = new LockOptions();
|
||||
if ( anything instanceof LockOptions) {
|
||||
LockOptions lockOptions = (LockOptions)anything;
|
||||
lr.setTimeOut(lockOptions.getTimeOut());
|
||||
lr.setScope( lockOptions.getScope());
|
||||
if ( lockOptions.getScope() == true ) // cascade specified lockMode
|
||||
lockMode = lockOptions.getLockMode();
|
||||
}
|
||||
lr.setLockMode(lockMode);
|
||||
session.buildLockRequest(lr).lock(entityName, child);
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// lock doesn't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
//TODO: should orphans really be deleted during lock???
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_LOCK";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#refresh(Object)
|
||||
*/
|
||||
public static final CascadingAction REFRESH = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to refresh: {0}", entityName );
|
||||
session.refresh( child, (Map) anything );
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// refresh doesn't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_REFRESH";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#evict(Object)
|
||||
*/
|
||||
public static final CascadingAction EVICT = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to evict: {0}", entityName );
|
||||
session.evict(child);
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// evicts don't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_EVICT";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#saveOrUpdate(Object)
|
||||
*/
|
||||
public static final CascadingAction SAVE_UPDATE = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to save or update: {0}", entityName );
|
||||
session.saveOrUpdate(entityName, child);
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// saves / updates don't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
// orphans should be deleted during save/update
|
||||
return true;
|
||||
}
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_SAVE_UPDATE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#merge(Object)
|
||||
*/
|
||||
public static final CascadingAction MERGE = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to merge: {0}", entityName );
|
||||
session.merge( entityName, child, (Map) anything );
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// merges don't cascade to uninitialized collections
|
||||
// TODO: perhaps this does need to cascade after all...
|
||||
return getLoadedElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
// orphans should not be deleted during merge??
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_MERGE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#persist(Object)
|
||||
*/
|
||||
public static final CascadingAction PERSIST = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to persist: {0}" + entityName );
|
||||
session.persist( entityName, child, (Map) anything );
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// persists do cascade to uninitialized collections
|
||||
return CascadingAction.getAllElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_PERSIST";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute persist during flush time
|
||||
*
|
||||
* @see org.hibernate.Session#persist(Object)
|
||||
*/
|
||||
public static final CascadingAction PERSIST_ON_FLUSH = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to persist on flush: {0}", entityName );
|
||||
session.persistOnFlush( entityName, child, (Map) anything );
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// persists don't cascade to uninitialized collections
|
||||
return CascadingAction.getLoadedElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return true;
|
||||
}
|
||||
@Override
|
||||
public boolean requiresNoCascadeChecking() {
|
||||
return true;
|
||||
}
|
||||
@Override
|
||||
public void noCascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
Object parent,
|
||||
EntityPersister persister,
|
||||
int propertyIndex) {
|
||||
if ( child == null ) {
|
||||
return;
|
||||
}
|
||||
Type type = persister.getPropertyTypes()[propertyIndex];
|
||||
if ( type.isEntityType() ) {
|
||||
String childEntityName = ( ( EntityType ) type ).getAssociatedEntityName( session.getFactory() );
|
||||
|
||||
if ( ! isInManagedState( child, session )
|
||||
&& ! ( child instanceof HibernateProxy ) //a proxy cannot be transient and it breaks ForeignKeys.isTransient
|
||||
&& ForeignKeys.isTransient( childEntityName, child, null, session ) ) {
|
||||
String parentEntityName = persister.getEntityName();
|
||||
String propertyName = persister.getPropertyNames()[propertyIndex];
|
||||
throw new TransientPropertyValueException(
|
||||
"object references an unsaved transient instance - save the transient instance before flushing",
|
||||
childEntityName,
|
||||
parentEntityName,
|
||||
propertyName
|
||||
);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean isInManagedState(Object child, EventSource session) {
|
||||
EntityEntry entry = session.getPersistenceContext().getEntry( child );
|
||||
return entry != null &&
|
||||
(
|
||||
entry.getStatus() == Status.MANAGED ||
|
||||
entry.getStatus() == Status.READ_ONLY ||
|
||||
entry.getStatus() == Status.SAVING
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_PERSIST_ON_FLUSH";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#replicate(Object, org.hibernate.ReplicationMode)
|
||||
*/
|
||||
public static final CascadingAction REPLICATE = new CascadingAction() {
|
||||
@Override
|
||||
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to replicate: {0}", entityName );
|
||||
session.replicate( entityName, child, (ReplicationMode) anything );
|
||||
}
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
|
||||
// replicate doesn't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator(session, collectionType, collection);
|
||||
}
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false; //I suppose?
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_REPLICATE";
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// static helper methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
/**
|
||||
* Given a collection, get an iterator of all its children, loading them
|
||||
* from the database if necessary.
|
||||
*
|
||||
* @param session The session within which the cascade is occurring.
|
||||
* @param collectionType The mapping type of the collection.
|
||||
* @param collection The collection instance.
|
||||
* @return The children iterator.
|
||||
*/
|
||||
private static Iterator getAllElementsIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
return collectionType.getElementsIterator( collection, session );
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate just the elements of the collection that are already there. Don't load
|
||||
* any new elements from the database.
|
||||
*/
|
||||
public static Iterator getLoadedElementsIterator(SessionImplementor session, CollectionType collectionType, Object collection) {
|
||||
if ( collectionIsInitialized(collection) ) {
|
||||
// handles arrays and newly instantiated collections
|
||||
return collectionType.getElementsIterator(collection, session);
|
||||
}
|
||||
else {
|
||||
// does not handle arrays (that's OK, because they can't be lazy)
|
||||
// or newly instantiated collections, so we can do the cast
|
||||
return ( (PersistentCollection) collection ).queuedAdditionIterator();
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean collectionIsInitialized(Object collection) {
|
||||
return !(collection instanceof PersistentCollection) || ( (PersistentCollection) collection ).wasInitialized();
|
||||
}
|
||||
public boolean performOnLazyProperty();
|
||||
}
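
The addition of performOnLazyProperty() to the contract lets individual actions opt out of forcing lazy initialization. A hedged sketch (not part of this commit) of how a caller might consult it; isLazyAndUninitialized(...) is a hypothetical helper:

	void cascadeProperty(CascadingAction action, Object parent, int propertyIndex) {
		if ( isLazyAndUninitialized( parent, propertyIndex ) && !action.performOnLazyProperty() ) {
			return; // this action opted out of touching uninitialized lazy state
		}
		// ... resolve the property value and call action.cascade(...) as usual
	}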
|
|
@ -0,0 +1,521 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
package org.hibernate.engine.spi;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.LockMode;
|
||||
import org.hibernate.LockOptions;
|
||||
import org.hibernate.ReplicationMode;
|
||||
import org.hibernate.TransientPropertyValueException;
|
||||
import org.hibernate.collection.spi.PersistentCollection;
|
||||
import org.hibernate.engine.internal.ForeignKeys;
|
||||
import org.hibernate.event.spi.EventSource;
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
import org.hibernate.proxy.HibernateProxy;
|
||||
import org.hibernate.type.CollectionType;
|
||||
import org.hibernate.type.EntityType;
|
||||
import org.hibernate.type.Type;
|
||||
|
||||
/**
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public class CascadingActions {
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
|
||||
CoreMessageLogger.class,
|
||||
CascadingAction.class.getName()
|
||||
);
|
||||
|
||||
/**
|
||||
* Disallow instantiation
|
||||
*/
|
||||
private CascadingActions() {
|
||||
}
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#delete(Object)
|
||||
*/
|
||||
public static final CascadingAction DELETE = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled) {
|
||||
LOG.tracev( "Cascading to delete: {0}", entityName );
|
||||
session.delete( entityName, child, isCascadeDeleteEnabled, (Set) anything );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// delete does cascade to uninitialized collections
|
||||
return getAllElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
// orphans should be deleted during delete
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_DELETE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#lock(Object, org.hibernate.LockMode)
|
||||
*/
|
||||
public static final CascadingAction LOCK = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled) {
|
||||
LOG.tracev( "Cascading to lock: {0}", entityName );
|
||||
LockMode lockMode = LockMode.NONE;
|
||||
LockOptions lr = new LockOptions();
|
||||
if ( anything instanceof LockOptions ) {
|
||||
LockOptions lockOptions = (LockOptions) anything;
|
||||
lr.setTimeOut( lockOptions.getTimeOut() );
|
||||
lr.setScope( lockOptions.getScope() );
|
||||
if ( lockOptions.getScope() ) {
|
||||
lockMode = lockOptions.getLockMode();
|
||||
}
|
||||
}
|
||||
lr.setLockMode( lockMode );
|
||||
session.buildLockRequest( lr ).lock( entityName, child );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// lock doesn't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
//TODO: should orphans really be deleted during lock???
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_LOCK";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#refresh(Object)
|
||||
*/
|
||||
public static final CascadingAction REFRESH = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to refresh: {0}", entityName );
|
||||
session.refresh( child, (Map) anything );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// refresh doesn't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_REFRESH";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#evict(Object)
|
||||
*/
|
||||
public static final CascadingAction EVICT = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to evict: {0}", entityName );
|
||||
session.evict( child );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// evicts don't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_EVICT";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#saveOrUpdate(Object)
|
||||
*/
|
||||
public static final CascadingAction SAVE_UPDATE = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to save or update: {0}", entityName );
|
||||
session.saveOrUpdate( entityName, child );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// saves / updates don't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
// orphans should be deleted during save/update
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_SAVE_UPDATE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#merge(Object)
|
||||
*/
|
||||
public static final CascadingAction MERGE = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to merge: {0}", entityName );
|
||||
session.merge( entityName, child, (Map) anything );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// merges don't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
// orphans should not be deleted during merge??
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_MERGE";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#persist(Object)
|
||||
*/
|
||||
public static final CascadingAction PERSIST = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to persist: {0}" + entityName );
|
||||
session.persist( entityName, child, (Map) anything );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// persists do cascade to uninitialized collections
|
||||
return getAllElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_PERSIST";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute persist during flush time
|
||||
*
|
||||
* @see org.hibernate.Session#persist(Object)
|
||||
*/
|
||||
public static final CascadingAction PERSIST_ON_FLUSH = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to persist on flush: {0}", entityName );
|
||||
session.persistOnFlush( entityName, child, (Map) anything );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// persists don't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean requiresNoCascadeChecking() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void noCascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
Object parent,
|
||||
EntityPersister persister,
|
||||
int propertyIndex) {
|
||||
if ( child == null ) {
|
||||
return;
|
||||
}
|
||||
Type type = persister.getPropertyTypes()[propertyIndex];
|
||||
if ( type.isEntityType() ) {
|
||||
String childEntityName = ((EntityType) type).getAssociatedEntityName( session.getFactory() );
|
||||
|
||||
if ( !isInManagedState( child, session )
|
||||
&& !(child instanceof HibernateProxy) //a proxy cannot be transient and it breaks ForeignKeys.isTransient
|
||||
&& ForeignKeys.isTransient( childEntityName, child, null, session ) ) {
|
||||
String parentEntityName = persister.getEntityName();
|
||||
String propertyName = persister.getPropertyNames()[propertyIndex];
|
||||
throw new TransientPropertyValueException(
|
||||
"object references an unsaved transient instance - save the transient instance before flushing",
|
||||
childEntityName,
|
||||
parentEntityName,
|
||||
propertyName
|
||||
);
|
||||
|
||||
}
|
||||
}
|
||||
}
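// Hedged illustration of the failure the noCascade(...) check above reports at flush time:
// a managed parent referencing a transient child with no cascade on the association.
// Order/Customer are hypothetical entities, not part of this commit.
//
//   Order order = (Order) session.get( Order.class, orderId );
//   order.setCustomer( new Customer() );   // transient, association has no cascade
//   session.flush();                       // -> TransientPropertyValueException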
|
||||
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean isInManagedState(Object child, EventSource session) {
|
||||
EntityEntry entry = session.getPersistenceContext().getEntry( child );
|
||||
return entry != null &&
|
||||
(
|
||||
entry.getStatus() == Status.MANAGED ||
|
||||
entry.getStatus() == Status.READ_ONLY ||
|
||||
entry.getStatus() == Status.SAVING
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_PERSIST_ON_FLUSH";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @see org.hibernate.Session#replicate
|
||||
*/
|
||||
public static final CascadingAction REPLICATE = new BaseCascadingAction() {
|
||||
@Override
|
||||
public void cascade(
|
||||
EventSource session,
|
||||
Object child,
|
||||
String entityName,
|
||||
Object anything,
|
||||
boolean isCascadeDeleteEnabled)
|
||||
throws HibernateException {
|
||||
LOG.tracev( "Cascading to replicate: {0}", entityName );
|
||||
session.replicate( entityName, child, (ReplicationMode) anything );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator getCascadableChildrenIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
// replicate doesn't cascade to uninitialized collections
|
||||
return getLoadedElementsIterator( session, collectionType, collection );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean deleteOrphans() {
|
||||
return false; //I suppose?
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ACTION_REPLICATE";
|
||||
}
|
||||
};
|
||||
|
||||
public abstract static class BaseCascadingAction implements CascadingAction {
|
||||
@Override
|
||||
public boolean requiresNoCascadeChecking() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void noCascade(EventSource session, Object child, Object parent, EntityPersister persister, int propertyIndex) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean performOnLazyProperty() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a collection, get an iterator of all its children, loading them
|
||||
* from the database if necessary.
|
||||
*
|
||||
* @param session The session within which the cascade is occurring.
|
||||
* @param collectionType The mapping type of the collection.
|
||||
* @param collection The collection instance.
|
||||
*
|
||||
* @return The children iterator.
|
||||
*/
|
||||
private static Iterator getAllElementsIterator(
|
||||
EventSource session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
return collectionType.getElementsIterator( collection, session );
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate just the elements of the collection that are already there. Don't load
|
||||
* any new elements from the database.
|
||||
*/
|
||||
public static Iterator getLoadedElementsIterator(
|
||||
SessionImplementor session,
|
||||
CollectionType collectionType,
|
||||
Object collection) {
|
||||
if ( collectionIsInitialized( collection ) ) {
|
||||
// handles arrays and newly instantiated collections
|
||||
return collectionType.getElementsIterator( collection, session );
|
||||
}
|
||||
else {
|
||||
// does not handle arrays (that's OK, because they can't be lazy)
|
||||
// or newly instantiated collections, so we can do the cast
|
||||
return ((PersistentCollection) collection).queuedAdditionIterator();
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean collectionIsInitialized(Object collection) {
|
||||
return !(collection instanceof PersistentCollection) || ((PersistentCollection) collection).wasInitialized();
|
||||
}
|
||||
}
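
A hedged sketch of what BaseCascadingAction provides to a new action: requiresNoCascadeChecking(), noCascade(...) and performOnLazyProperty() already get defaults, so only the members below must be supplied. NO_OP is illustrative, would live inside CascadingActions, and is not part of this commit:

	public static final CascadingAction NO_OP = new BaseCascadingAction() {
		@Override
		public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled) {
			// intentionally does nothing
		}

		@Override
		public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
			return getLoadedElementsIterator( session, collectionType, collection );
		}

		@Override
		public boolean deleteOrphans() {
			return false;
		}

		@Override
		public String toString() {
			return "ACTION_NO_OP";
		}
	};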
|
|
@ -182,6 +182,9 @@ public final class CollectionEntry implements Serializable {
|
|||
}
|
||||
|
||||
public void preFlush(PersistentCollection collection) throws HibernateException {
|
||||
if ( loadedKey == null && collection.getKey() != null ) {
|
||||
loadedKey = collection.getKey();
|
||||
}
|
||||
|
||||
boolean nonMutableChange = collection.isDirty() &&
|
||||
getLoadedPersister()!=null &&
|
||||
|
|
|
@ -153,7 +153,13 @@ public interface SessionFactoryImplementor extends Mapping, SessionFactory {
|
|||
|
||||
/**
|
||||
* Get the connection provider
|
||||
*
|
||||
* @deprecated Access to connections via {@link org.hibernate.engine.jdbc.spi.JdbcConnectionAccess} should
|
||||
* be preferred over access via {@link ConnectionProvider}, whenever possible.
|
||||
* {@link org.hibernate.engine.jdbc.spi.JdbcConnectionAccess} is tied to the Hibernate Session to
|
||||
* properly account for contextual information. See {@link SessionImplementor#getJdbcConnectionAccess()}
|
||||
*/
|
||||
@Deprecated
|
||||
public ConnectionProvider getConnectionProvider();
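	// Hedged sketch of the preferred replacement named in the deprecation note above:
	// go through the session's JdbcConnectionAccess (exception handling elided; the
	// surrounding session/work code is assumed, not shown in this commit).
	//
	//   Connection connection = session.getJdbcConnectionAccess().obtainConnection();
	//   try {
	//       // ... use the connection ...
	//   }
	//   finally {
	//       session.getJdbcConnectionAccess().releaseConnection( connection );
	//   }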
|
||||
/**
|
||||
* Get the names of all persistent classes that implement/extend the given interface/class
|
||||
|
|
|
@ -41,13 +41,17 @@ import org.hibernate.service.spi.BasicServiceInitiator;
|
|||
import org.hibernate.service.spi.ServiceRegistryImplementor;
|
||||
|
||||
/**
|
||||
* Standard instantiator for the standard {@link TransactionFactory} service.
|
||||
* Standard initiator for {@link TransactionFactory} service.
|
||||
*
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public class TransactionFactoryInitiator<T extends TransactionImplementor> implements BasicServiceInitiator<TransactionFactory> {
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
|
||||
TransactionFactoryInitiator.class.getName());
|
||||
public class TransactionFactoryInitiator<T extends TransactionImplementor>
|
||||
implements BasicServiceInitiator<TransactionFactory> {
|
||||
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
|
||||
CoreMessageLogger.class,
|
||||
TransactionFactoryInitiator.class.getName()
|
||||
);
|
||||
|
||||
public static final TransactionFactoryInitiator INSTANCE = new TransactionFactoryInitiator();
|
||||
|
||||
|
@ -87,7 +91,7 @@ public class TransactionFactoryInitiator<T extends TransactionImplementor> imple
|
|||
}
|
||||
}
|
||||
else {
|
||||
final String strategyClassName = mapLegacyNames( strategy.toString() );
|
||||
final String strategyClassName = mapName( strategy.toString() );
|
||||
LOG.transactionStrategy( strategyClassName );
|
||||
|
||||
try {
|
||||
|
@ -111,7 +115,8 @@ public class TransactionFactoryInitiator<T extends TransactionImplementor> imple
|
|||
}
|
||||
}
|
||||
|
||||
private String mapLegacyNames(String name) {
|
||||
private String mapName(String name) {
|
||||
// check legacy names ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
if ( "org.hibernate.transaction.JDBCTransactionFactory".equals( name ) ) {
|
||||
return JdbcTransactionFactory.class.getName();
|
||||
}
|
||||
|
@ -124,6 +129,20 @@ public class TransactionFactoryInitiator<T extends TransactionImplementor> imple
|
|||
return CMTTransactionFactory.class.getName();
|
||||
}
|
||||
|
||||
// check short names ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
if ( JdbcTransactionFactory.SHORT_NAME.equals( name ) ) {
|
||||
return JdbcTransactionFactory.class.getName();
|
||||
}
|
||||
|
||||
if ( JtaTransactionFactory.SHORT_NAME.equals( name ) ) {
|
||||
return JtaTransactionFactory.class.getName();
|
||||
}
|
||||
|
||||
if ( CMTTransactionFactory.SHORT_NAME.equals( name ) ) {
|
||||
return CMTTransactionFactory.class.getName();
|
||||
}
|
||||
|
||||
|
||||
return name;
|
||||
}
|
||||
}
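
A hedged example of what the short-name mapping above enables: the transaction factory can now be selected with a short token instead of a fully-qualified class name. The "hibernate.transaction.factory_class" key shown is assumed to be the setting this initiator reads:

	Properties props = new Properties();
	props.setProperty( "hibernate.transaction.factory_class", "jdbc" );  // -> JdbcTransactionFactory
	// likewise "jta" -> JtaTransactionFactory and "cmt" -> CMTTransactionFactory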
|
||||
|
|
|
@ -35,7 +35,7 @@ import org.hibernate.engine.transaction.spi.TransactionCoordinator;
|
|||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.jdbc.WorkExecutor;
|
||||
import org.hibernate.jdbc.WorkExecutorVisitable;
|
||||
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
|
||||
import org.hibernate.service.jdbc.connections.spi.JdbcConnectionAccess;
|
||||
|
||||
/**
|
||||
* The isolation delegate for JDBC {@link Connection} based transactions
|
||||
|
@ -52,8 +52,8 @@ public class JdbcIsolationDelegate implements IsolationDelegate {
|
|||
this.transactionCoordinator = transactionCoordinator;
|
||||
}
|
||||
|
||||
protected ConnectionProvider connectionProvider() {
|
||||
return transactionCoordinator.getJdbcCoordinator().getLogicalConnection().getJdbcServices().getConnectionProvider();
|
||||
protected JdbcConnectionAccess jdbcConnectionAccess() {
|
||||
return transactionCoordinator.getTransactionContext().getJdbcConnectionAccess();
|
||||
}
|
||||
|
||||
protected SqlExceptionHelper sqlExceptionHelper() {
|
||||
|
@ -65,7 +65,7 @@ public class JdbcIsolationDelegate implements IsolationDelegate {
|
|||
boolean wasAutoCommit = false;
|
||||
try {
|
||||
// todo : should we use a connection proxy here?
|
||||
Connection connection = connectionProvider().getConnection();
|
||||
Connection connection = jdbcConnectionAccess().obtainConnection();
|
||||
try {
|
||||
if ( transacted ) {
|
||||
if ( connection.getAutoCommit() ) {
|
||||
|
@ -112,7 +112,7 @@ public class JdbcIsolationDelegate implements IsolationDelegate {
|
|||
}
|
||||
}
|
||||
try {
|
||||
connectionProvider().closeConnection( connection );
|
||||
jdbcConnectionAccess().releaseConnection( connection );
|
||||
}
|
||||
catch ( Exception ignore ) {
|
||||
LOG.unableToReleaseIsolatedConnection( ignore );
|
||||
|
|
|
@ -34,6 +34,8 @@ import org.hibernate.engine.transaction.spi.TransactionFactory;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public final class JdbcTransactionFactory implements TransactionFactory<JdbcTransaction> {
|
||||
public static final String SHORT_NAME = "jdbc";
|
||||
|
||||
@Override
|
||||
public JdbcTransaction createTransaction(TransactionCoordinator transactionCoordinator) {
|
||||
return new JdbcTransaction( transactionCoordinator );
|
||||
|
|
|
@ -37,6 +37,8 @@ import org.hibernate.engine.transaction.spi.TransactionFactory;
|
|||
* @author Gavin King
|
||||
*/
|
||||
public class CMTTransactionFactory implements TransactionFactory<CMTTransaction> {
|
||||
public static final String SHORT_NAME = "cmt";
|
||||
|
||||
@Override
|
||||
public CMTTransaction createTransaction(TransactionCoordinator transactionCoordinator) {
|
||||
return new CMTTransaction( transactionCoordinator );
|
||||
|
|
|
@ -23,12 +23,12 @@
|
|||
*/
|
||||
package org.hibernate.engine.transaction.internal.jta;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.SQLException;
|
||||
import javax.transaction.NotSupportedException;
|
||||
import javax.transaction.SystemException;
|
||||
import javax.transaction.Transaction;
|
||||
import javax.transaction.TransactionManager;
|
||||
import java.sql.Connection;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
|
@ -39,7 +39,7 @@ import org.hibernate.engine.transaction.spi.TransactionCoordinator;
|
|||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.jdbc.WorkExecutor;
|
||||
import org.hibernate.jdbc.WorkExecutorVisitable;
|
||||
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
|
||||
import org.hibernate.service.jdbc.connections.spi.JdbcConnectionAccess;
|
||||
|
||||
/**
|
||||
* An isolation delegate for JTA environments.
|
||||
|
@ -63,11 +63,8 @@ public class JtaIsolationDelegate implements IsolationDelegate {
|
|||
.retrieveTransactionManager();
|
||||
}
|
||||
|
||||
protected ConnectionProvider connectionProvider() {
|
||||
return transactionCoordinator.getTransactionContext()
|
||||
.getTransactionEnvironment()
|
||||
.getJdbcServices()
|
||||
.getConnectionProvider();
|
||||
protected JdbcConnectionAccess jdbcConnectionAccess() {
|
||||
return transactionCoordinator.getTransactionContext().getJdbcConnectionAccess();
|
||||
}
|
||||
|
||||
protected SqlExceptionHelper sqlExceptionHelper() {
|
||||
|
@ -120,15 +117,15 @@ public class JtaIsolationDelegate implements IsolationDelegate {
|
|||
}
|
||||
|
||||
private <T> T doTheWorkInNewTransaction(WorkExecutorVisitable<T> work, TransactionManager transactionManager) {
|
||||
T result = null;
|
||||
try {
|
||||
// start the new isolated transaction
|
||||
transactionManager.begin();
|
||||
|
||||
try {
|
||||
result = doTheWork( work );
|
||||
T result = doTheWork( work );
|
||||
// if everything went ok, commit the isolated transaction
|
||||
transactionManager.commit();
|
||||
return result;
|
||||
}
|
||||
catch ( Exception e ) {
|
||||
try {
|
||||
|
@ -146,7 +143,6 @@ public class JtaIsolationDelegate implements IsolationDelegate {
|
|||
catch ( NotSupportedException e ) {
|
||||
throw new HibernateException( "Unable to start isolated transaction", e );
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private <T> T doTheWorkInNoTransaction(WorkExecutorVisitable<T> work) {
|
||||
|
@ -156,7 +152,7 @@ public class JtaIsolationDelegate implements IsolationDelegate {
|
|||
private <T> T doTheWork(WorkExecutorVisitable<T> work) {
|
||||
try {
|
||||
// obtain our isolated connection
|
||||
Connection connection = connectionProvider().getConnection();
|
||||
Connection connection = jdbcConnectionAccess().obtainConnection();
|
||||
try {
|
||||
// do the actual work
|
||||
return work.accept( new WorkExecutor<T>(), connection );
|
||||
|
@ -170,7 +166,7 @@ public class JtaIsolationDelegate implements IsolationDelegate {
|
|||
finally {
|
||||
try {
|
||||
// no matter what, release the connection (handle)
|
||||
connectionProvider().closeConnection( connection );
|
||||
jdbcConnectionAccess().releaseConnection( connection );
|
||||
}
|
||||
catch ( Throwable ignore ) {
|
||||
LOG.unableToReleaseIsolatedConnection( ignore );
|
||||
|
|
|
@ -40,6 +40,8 @@ import org.hibernate.service.jta.platform.spi.JtaPlatform;
|
|||
* @author Les Hazlewood
|
||||
*/
|
||||
public class JtaTransactionFactory implements TransactionFactory<JtaTransaction> {
|
||||
public static final String SHORT_NAME = "jta";
|
||||
|
||||
@Override
|
||||
public JtaTransaction createTransaction(TransactionCoordinator transactionCoordinator) {
|
||||
return new JtaTransaction( transactionCoordinator );
|
||||
|
|
|
@ -37,6 +37,7 @@ import org.hibernate.engine.internal.Cascade;
|
|||
import org.hibernate.engine.internal.Collections;
|
||||
import org.hibernate.engine.spi.ActionQueue;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.CollectionEntry;
|
||||
import org.hibernate.engine.spi.CollectionKey;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
|
@ -168,7 +169,7 @@ public abstract class AbstractFlushingEventListener implements Serializable {
|
|||
protected Object getAnything() { return null; }
|
||||
|
||||
protected CascadingAction getCascadingAction() {
|
||||
return CascadingAction.SAVE_UPDATE;
|
||||
return CascadingActions.SAVE_UPDATE;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -37,7 +37,7 @@ import org.hibernate.classic.Lifecycle;
|
|||
import org.hibernate.engine.internal.Cascade;
|
||||
import org.hibernate.engine.internal.ForeignKeys;
|
||||
import org.hibernate.engine.internal.Nullability;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.PersistenceContext;
|
||||
|
@ -321,7 +321,7 @@ public class DefaultDeleteEventListener implements DeleteEventListener {
|
|||
session.getPersistenceContext().incrementCascadeLevel();
|
||||
try {
|
||||
// cascade-delete to collections BEFORE the collection owner is deleted
|
||||
new Cascade( CascadingAction.DELETE, Cascade.AFTER_INSERT_BEFORE_DELETE, session )
|
||||
new Cascade( CascadingActions.DELETE, Cascade.AFTER_INSERT_BEFORE_DELETE, session )
|
||||
.cascade( persister, entity, transientEntities );
|
||||
}
|
||||
finally {
|
||||
|
@ -341,7 +341,7 @@ public class DefaultDeleteEventListener implements DeleteEventListener {
|
|||
session.getPersistenceContext().incrementCascadeLevel();
|
||||
try {
|
||||
// cascade-delete to many-to-one AFTER the parent was deleted
|
||||
new Cascade( CascadingAction.DELETE, Cascade.BEFORE_INSERT_AFTER_DELETE, session )
|
||||
new Cascade( CascadingActions.DELETE, Cascade.BEFORE_INSERT_AFTER_DELETE, session )
|
||||
.cascade( persister, entity, transientEntities );
|
||||
}
|
||||
finally {
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.jboss.logging.Logger;
|
|||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.engine.internal.Cascade;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.PersistenceContext;
|
||||
|
@ -117,7 +117,7 @@ public class DefaultEvictEventListener implements EvictEventListener {
|
|||
// This is now handled by removeEntity()
|
||||
//session.getPersistenceContext().removeDatabaseSnapshot(key);
|
||||
|
||||
new Cascade( CascadingAction.EVICT, Cascade.AFTER_EVICT, session )
|
||||
new Cascade( CascadingActions.EVICT, Cascade.AFTER_EVICT, session )
|
||||
.cascade( persister, object );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@ import org.hibernate.LockMode;
|
|||
import org.hibernate.TransientObjectException;
|
||||
import org.hibernate.engine.internal.Cascade;
|
||||
import org.hibernate.engine.internal.ForeignKeys;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.event.spi.EventSource;
|
||||
|
@ -89,7 +89,7 @@ public class DefaultLockEventListener extends AbstractLockUpgradeEventListener i
|
|||
EventSource source = event.getSession();
|
||||
source.getPersistenceContext().incrementCascadeLevel();
|
||||
try {
|
||||
new Cascade(CascadingAction.LOCK, Cascade.AFTER_LOCK, source)
|
||||
new Cascade( CascadingActions.LOCK, Cascade.AFTER_LOCK, source)
|
||||
.cascade( persister, entity, event.getLockOptions() );
|
||||
}
|
||||
finally {
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.hibernate.WrongClassException;
|
|||
import org.hibernate.bytecode.instrumentation.spi.FieldInterceptor;
|
||||
import org.hibernate.engine.internal.Cascade;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
|
@ -447,7 +448,7 @@ public class DefaultMergeEventListener extends AbstractSaveEventListener impleme
|
|||
|
||||
@Override
|
||||
protected CascadingAction getCascadeAction() {
|
||||
return CascadingAction.MERGE;
|
||||
return CascadingActions.MERGE;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.hibernate.HibernateException;
|
|||
import org.hibernate.ObjectDeletedException;
|
||||
import org.hibernate.PersistentObjectException;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.engine.spi.Status;
|
||||
|
@ -60,7 +61,7 @@ public class DefaultPersistEventListener extends AbstractSaveEventListener imple
|
|||
|
||||
@Override
|
||||
protected CascadingAction getCascadeAction() {
|
||||
return CascadingAction.PERSIST;
|
||||
return CascadingActions.PERSIST;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
package org.hibernate.event.internal;
|
||||
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
|
||||
/**
|
||||
* When persist is used as the cascade action, persistOnFlush should be used
|
||||
|
@ -31,6 +32,6 @@ import org.hibernate.engine.spi.CascadingAction;
|
|||
*/
|
||||
public class DefaultPersistOnFlushEventListener extends DefaultPersistEventListener {
|
||||
protected CascadingAction getCascadeAction() {
|
||||
return CascadingAction.PERSIST_ON_FLUSH;
|
||||
return CascadingActions.PERSIST_ON_FLUSH;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ import org.hibernate.PersistentObjectException;
|
|||
import org.hibernate.UnresolvableObjectException;
|
||||
import org.hibernate.cache.spi.CacheKey;
|
||||
import org.hibernate.engine.internal.Cascade;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
|
@ -119,7 +119,7 @@ public class DefaultRefreshEventListener implements RefreshEventListener {
|
|||
|
||||
// cascade the refresh prior to refreshing this entity
|
||||
refreshedAlready.put(object, object);
|
||||
new Cascade( CascadingAction.REFRESH, Cascade.BEFORE_REFRESH, source)
|
||||
new Cascade( CascadingActions.REFRESH, Cascade.BEFORE_REFRESH, source)
|
||||
.cascade( persister, object, refreshedAlready );
|
||||
|
||||
if ( e != null ) {
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.hibernate.ReplicationMode;
|
|||
import org.hibernate.TransientObjectException;
|
||||
import org.hibernate.engine.internal.Cascade;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.engine.spi.Status;
|
||||
|
@ -207,7 +208,7 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
|
|||
EventSource source) {
|
||||
source.getPersistenceContext().incrementCascadeLevel();
|
||||
try {
|
||||
new Cascade( CascadingAction.REPLICATE, Cascade.AFTER_UPDATE, source )
|
||||
new Cascade( CascadingActions.REPLICATE, Cascade.AFTER_UPDATE, source )
|
||||
.cascade( persister, entity, replicationMode );
|
||||
}
|
||||
finally {
|
||||
|
@ -217,6 +218,6 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
|
|||
|
||||
@Override
|
||||
protected CascadingAction getCascadeAction() {
|
||||
return CascadingAction.REPLICATE;
|
||||
return CascadingActions.REPLICATE;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.hibernate.TransientObjectException;
|
|||
import org.hibernate.classic.Lifecycle;
|
||||
import org.hibernate.engine.internal.Cascade;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
|
@ -358,7 +359,7 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
|
|||
EventSource source = event.getSession();
|
||||
source.getPersistenceContext().incrementCascadeLevel();
|
||||
try {
|
||||
new Cascade( CascadingAction.SAVE_UPDATE, Cascade.AFTER_UPDATE, source )
|
||||
new Cascade( CascadingActions.SAVE_UPDATE, Cascade.AFTER_UPDATE, source )
|
||||
.cascade( persister, entity );
|
||||
}
|
||||
finally {
|
||||
|
@ -368,6 +369,6 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
|
|||
|
||||
@Override
|
||||
protected CascadingAction getCascadeAction() {
|
||||
return CascadingAction.SAVE_UPDATE;
|
||||
return CascadingActions.SAVE_UPDATE;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,6 +25,8 @@ package org.hibernate.event.spi;
|
|||
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
|
||||
/**
|
||||
* Called after deleting an item from the datastore
|
||||
*
|
||||
|
@ -32,4 +34,6 @@ import java.io.Serializable;
|
|||
*/
|
||||
public interface PostDeleteEventListener extends Serializable {
|
||||
public void onPostDelete(PostDeleteEvent event);
|
||||
|
||||
public boolean requiresPostCommitHanding(EntityPersister persister);
|
||||
}
|
||||
|
|
|
@ -25,11 +25,25 @@ package org.hibernate.event.spi;
|
|||
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
|
||||
/**
|
||||
* Called after inserting an item in the datastore
|
||||
*
|
||||
* @author Gavin King
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public interface PostInsertEventListener extends Serializable {
|
||||
public void onPostInsert(PostInsertEvent event);
|
||||
|
||||
/**
|
||||
* Does this listener require that after transaction hooks be registered? Typically this is {@code true}
|
||||
* for post-insert event listeners, but may not be, for example, in JPA cases where there are no callbacks defined
|
||||
* for the particular entity.
|
||||
*
|
||||
* @param persister The persister for the entity in question.
|
||||
*
|
||||
* @return {@code true} if after transaction callbacks should be added.
|
||||
*/
|
||||
public boolean requiresPostCommitHanding(EntityPersister persister);
|
||||
}
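
A hedged sketch of a listener honouring the new contract; AuditInsertListener and hasCallbacks(...) are illustrative names, not part of this commit:

	public class AuditInsertListener implements PostInsertEventListener {
		@Override
		public void onPostInsert(PostInsertEvent event) {
			// record the insert, e.g. write an audit row
		}

		@Override
		public boolean requiresPostCommitHanding(EntityPersister persister) {
			// only ask for after-transaction hooks when this entity actually needs them
			return hasCallbacks( persister.getEntityName() );
		}
	}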
|
||||
|
|
|
@ -25,6 +25,8 @@ package org.hibernate.event.spi;
|
|||
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
|
||||
/**
|
||||
* Called after updating the datastore
|
||||
*
|
||||
|
@ -32,4 +34,6 @@ import java.io.Serializable;
|
|||
*/
|
||||
public interface PostUpdateEventListener extends Serializable {
|
||||
public void onPostUpdate(PostUpdateEvent event);
|
||||
|
||||
public boolean requiresPostCommitHanding(EntityPersister persister);
|
||||
}
|
||||
|
|
|
@ -99,7 +99,7 @@ public class OptimizerFactory {
|
|||
return POOLED_LO;
|
||||
}
|
||||
else {
|
||||
LOG.debugf( "Unknown optimizer key [%s]; returning null assuming Optimizer impl class name" );
|
||||
LOG.debugf( "Unknown optimizer key [%s]; returning null assuming Optimizer impl class name", externalName );
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -237,5 +237,9 @@ public class CriteriaLoader extends OuterJoinLoader {
|
|||
protected List getResultList(List results, ResultTransformer resultTransformer) {
|
||||
return resolveResultTransformer( resultTransformer ).transformList( results );
|
||||
}
|
||||
|
||||
protected String getQueryIdentifier() {
|
||||
return "[CRITERIA] " + getSQLString();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -72,7 +72,7 @@ public class JoinedSubclass extends Subclass implements TableOwner {
|
|||
public Iterator getReferenceablePropertyIterator() {
|
||||
return getPropertyIterator();
|
||||
}
|
||||
|
||||
|
||||
public Object accept(PersistentClassVisitor mv) {
|
||||
return mv.accept(this);
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.hibernate.EntityMode;
|
|||
import org.hibernate.MappingException;
|
||||
import org.hibernate.PropertyNotFoundException;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.engine.spi.Mapping;
|
||||
import org.hibernate.internal.util.collections.ArrayHelper;
|
||||
import org.hibernate.property.Getter;
|
||||
|
@ -128,8 +129,8 @@ public class Property implements Serializable, MetaAttributable {
|
|||
}
|
||||
int length = compositeType.getSubtypes().length;
|
||||
for ( int i=0; i<length; i++ ) {
|
||||
if ( compositeType.getCascadeStyle(i) != CascadeStyle.NONE ) {
|
||||
return CascadeStyle.ALL;
|
||||
if ( compositeType.getCascadeStyle(i) != CascadeStyles.NONE ) {
|
||||
return CascadeStyles.ALL;
|
||||
}
|
||||
}
|
||||
return getCascadeStyle( cascade );
|
||||
|
@ -146,16 +147,16 @@ public class Property implements Serializable, MetaAttributable {
|
|||
|
||||
private static CascadeStyle getCascadeStyle(String cascade) {
|
||||
if ( cascade==null || cascade.equals("none") ) {
|
||||
return CascadeStyle.NONE;
|
||||
return CascadeStyles.NONE;
|
||||
}
|
||||
else {
|
||||
StringTokenizer tokens = new StringTokenizer(cascade, ", ");
|
||||
CascadeStyle[] styles = new CascadeStyle[ tokens.countTokens() ] ;
|
||||
int i=0;
|
||||
while ( tokens.hasMoreTokens() ) {
|
||||
styles[i++] = CascadeStyle.getCascadeStyle( tokens.nextToken() );
|
||||
styles[i++] = CascadeStyles.getCascadeStyle( tokens.nextToken() );
|
||||
}
|
||||
return new CascadeStyle.MultipleCascadeStyle(styles);
|
||||
return new CascadeStyles.MultipleCascadeStyle(styles);
|
||||
}
|
||||
}
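	// Hedged illustration of the parsing above with an assumed mapping value:
	//   CascadeStyle style = getCascadeStyle( "save-update, delete-orphan" );
	//   // -> a CascadeStyles.MultipleCascadeStyle combining the two registered styles,
	//   //    while null or "none" short-circuits to CascadeStyles.NONE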
|
||||
|
||||
|
|
|
@ -23,6 +23,8 @@
|
|||
*/
|
||||
package org.hibernate.mapping;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.lang.reflect.Field;
|
||||
import javax.persistence.AttributeConverter;
|
||||
import java.lang.reflect.TypeVariable;
|
||||
import java.sql.CallableStatement;
|
||||
|
@ -36,9 +38,9 @@ import java.util.Properties;
|
|||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.hibernate.AnnotationException;
|
||||
import org.hibernate.FetchMode;
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.cfg.AccessType;
|
||||
import org.hibernate.cfg.AttributeConverterDefinition;
|
||||
import org.hibernate.cfg.Environment;
|
||||
import org.hibernate.cfg.Mappings;
|
||||
|
@ -49,6 +51,7 @@ import org.hibernate.id.IdentityGenerator;
|
|||
import org.hibernate.id.PersistentIdentifierGenerator;
|
||||
import org.hibernate.id.factory.IdentifierGeneratorFactory;
|
||||
import org.hibernate.internal.util.ReflectHelper;
|
||||
import org.hibernate.property.DirectPropertyAccessor;
|
||||
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
|
||||
import org.hibernate.type.Type;
|
||||
import org.hibernate.type.descriptor.ValueBinder;
|
||||
|
@ -61,6 +64,7 @@ import org.hibernate.type.descriptor.sql.BasicExtractor;
|
|||
import org.hibernate.type.descriptor.sql.JdbcTypeJavaClassMappings;
|
||||
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
|
||||
import org.hibernate.type.descriptor.sql.SqlTypeDescriptorRegistry;
|
||||
import org.hibernate.usertype.DynamicParameterizedType;
|
||||
|
||||
/**
|
||||
* Any value that maps to columns.
|
||||
|
@ -324,6 +328,11 @@ public class SimpleValue implements KeyValue {
|
|||
if ( typeName == null ) {
|
||||
throw new MappingException( "No type name" );
|
||||
}
|
||||
if ( typeParameters != null
|
||||
&& Boolean.valueOf( typeParameters.getProperty( DynamicParameterizedType.IS_DYNAMIC ) )
|
||||
&& typeParameters.get( DynamicParameterizedType.PARAMETER_TYPE ) == null ) {
|
||||
createParameterImpl();
|
||||
}
|
||||
|
||||
Type result = mappings.getTypeResolver().heuristicType( typeName, typeParameters );
|
||||
if ( result == null ) {
|
||||
|
@ -331,7 +340,7 @@ public class SimpleValue implements KeyValue {
|
|||
if ( table != null ) {
|
||||
msg += ", at table: " + table.getName();
|
||||
}
|
||||
if ( columns!=null && columns.size()>0 ) {
|
||||
if ( columns != null && columns.size() > 0 ) {
|
||||
msg += ", for columns: " + columns;
|
||||
}
|
||||
throw new MappingException( msg );
|
||||
|
@ -510,4 +519,97 @@ public class SimpleValue implements KeyValue {
|
|||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void createParameterImpl() {
|
||||
try {
|
||||
String[] columnsNames = new String[columns.size()];
|
||||
for ( int i = 0; i < columns.size(); i++ ) {
|
||||
columnsNames[i] = ( (Column) columns.get( i ) ).getName();
|
||||
}
|
||||
|
||||
AccessType accessType = AccessType.getAccessStrategy( typeParameters
|
||||
.getProperty( DynamicParameterizedType.ACCESS_TYPE ) );
|
||||
final Class classEntity = ReflectHelper.classForName( typeParameters
|
||||
.getProperty( DynamicParameterizedType.ENTITY ) );
|
||||
final String propertyName = typeParameters.getProperty( DynamicParameterizedType.PROPERTY );
|
||||
|
||||
Annotation[] annotations;
|
||||
if ( accessType == AccessType.FIELD ) {
|
||||
annotations = ( (Field) new DirectPropertyAccessor().getGetter( classEntity, propertyName ).getMember() )
|
||||
.getAnnotations();
|
||||
|
||||
}
|
||||
else {
|
||||
annotations = ReflectHelper.getGetter( classEntity, propertyName ).getMethod().getAnnotations();
|
||||
}
|
||||
|
||||
typeParameters.put(
|
||||
DynamicParameterizedType.PARAMETER_TYPE,
|
||||
new ParameterTypeImpl( ReflectHelper.classForName( typeParameters
|
||||
.getProperty( DynamicParameterizedType.RETURNED_CLASS ) ), annotations, table.getCatalog(),
|
||||
table.getSchema(), table.getName(), Boolean.valueOf( typeParameters
|
||||
.getProperty( DynamicParameterizedType.IS_PRIMARY_KEY ) ), columnsNames ) );
|
||||
|
||||
}
|
||||
catch ( ClassNotFoundException cnfe ) {
|
||||
throw new MappingException( "Could not create DynamicParameterizedType for type: " + typeName, cnfe );
|
||||
}
|
||||
}
|
||||
|
||||
private final class ParameterTypeImpl implements DynamicParameterizedType.ParameterType {
|
||||
|
||||
private final Class returnedClass;
|
||||
private final Annotation[] annotationsMethod;
|
||||
private final String catalog;
|
||||
private final String schema;
|
||||
private final String table;
|
||||
private final boolean primaryKey;
|
||||
private final String[] columns;
|
||||
|
||||
private ParameterTypeImpl(Class returnedClass, Annotation[] annotationsMethod, String catalog, String schema,
|
||||
String table, boolean primaryKey, String[] columns) {
|
||||
this.returnedClass = returnedClass;
|
||||
this.annotationsMethod = annotationsMethod;
|
||||
this.catalog = catalog;
|
||||
this.schema = schema;
|
||||
this.table = table;
|
||||
this.primaryKey = primaryKey;
|
||||
this.columns = columns;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class getReturnedClass() {
|
||||
return returnedClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Annotation[] getAnnotationsMethod() {
|
||||
return annotationsMethod;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getCatalog() {
|
||||
return catalog;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSchema() {
|
||||
return schema;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTable() {
|
||||
return table;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isPrimaryKey() {
|
||||
return primaryKey;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] getColumns() {
|
||||
return columns;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -311,7 +311,8 @@ public class Table implements RelationalModel, Serializable {
|
|||
}
|
||||
|
||||
if ( removeIt ) {
|
||||
uniqueKeys.remove( uniqueKeyEntry.getKey() );
|
||||
//uniqueKeys.remove( uniqueKeyEntry.getKey() );
|
||||
uniqueKeyEntries.remove();
|
||||
}
|
||||
}
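// Hedged note on the change above: removing via the entry iterator (uniqueKeyEntries.remove())
// instead of mutating the backing uniqueKeys map mid-iteration avoids a
// ConcurrentModificationException; the general shape is:
//
//   Iterator<Map.Entry<String, UniqueKey>> entries = uniqueKeys.entrySet().iterator();
//   while ( entries.hasNext() ) {
//       if ( shouldDrop( entries.next().getValue() ) ) {   // shouldDrop is hypothetical
//           entries.remove();                               // mutate through the iterator
//       }
//   }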
|
||||
|
||||
|
|
|
@ -161,16 +161,7 @@ public class MetadataBuilderImpl implements MetadataBuilder {
|
|||
false
|
||||
);
|
||||
|
||||
multiTenancyStrategy = configService.getSetting(
|
||||
AvailableSettings.MULTI_TENANT,
|
||||
new ConfigurationService.Converter<org.hibernate.MultiTenancyStrategy>() {
|
||||
@Override
|
||||
public MultiTenancyStrategy convert(Object value) {
|
||||
return MultiTenancyStrategy.fromConfigValue( value );
|
||||
}
|
||||
},
|
||||
MultiTenancyStrategy.NONE
|
||||
);
|
||||
multiTenancyStrategy = MultiTenancyStrategy.determineMultiTenancyStrategy( configService.getSettings() );
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.hibernate.AssertionFailure;
|
|||
import org.hibernate.FetchMode;
|
||||
import org.hibernate.engine.FetchStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.id.MultipleHiLoPerTableGenerator;
|
||||
import org.hibernate.internal.util.collections.CollectionHelper;
|
||||
|
||||
|
@ -68,22 +69,22 @@ public class EnumConversionHelper {
|
|||
public static CascadeStyle cascadeTypeToCascadeStyle(CascadeType cascadeType) {
|
||||
switch ( cascadeType ) {
|
||||
case ALL: {
|
||||
return CascadeStyle.ALL;
|
||||
return CascadeStyles.ALL;
|
||||
}
|
||||
case PERSIST: {
|
||||
return CascadeStyle.PERSIST;
|
||||
return CascadeStyles.PERSIST;
|
||||
}
|
||||
case MERGE: {
|
||||
return CascadeStyle.MERGE;
|
||||
return CascadeStyles.MERGE;
|
||||
}
|
||||
case REMOVE: {
|
||||
return CascadeStyle.DELETE;
|
||||
return CascadeStyles.DELETE;
|
||||
}
|
||||
case REFRESH: {
|
||||
return CascadeStyle.REFRESH;
|
||||
return CascadeStyles.REFRESH;
|
||||
}
|
||||
case DETACH: {
|
||||
return CascadeStyle.EVICT;
|
||||
return CascadeStyles.EVICT;
|
||||
}
|
||||
default: {
|
||||
throw new AssertionFailure( "Unknown cascade type" );
|
||||
|
|
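
A hedged usage sketch of the helper above, translating JPA cascade annotations into the new CascadeStyles constants (the JPA CascadeType enum and the collection handling are assumptions, not shown in this commit):

	Set<CascadeStyle> styles = new HashSet<CascadeStyle>();
	for ( CascadeType cascadeType : EnumSet.of( CascadeType.PERSIST, CascadeType.MERGE ) ) {
		styles.add( EnumConversionHelper.cascadeTypeToCascadeStyle( cascadeType ) );
	}
	// styles now holds the CascadeStyles constants for PERSIST and MERGE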
|
@@ -35,6 +35,7 @@ import org.hibernate.LockMode;
import org.hibernate.TruthValue;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.CascadeStyles;
import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;
import org.hibernate.jaxb.spi.hbm.JaxbCacheElement;
import org.hibernate.jaxb.spi.hbm.JaxbColumnElement;

@@ -252,7 +253,7 @@ public class Helper {
			cascades = bindingContext.getMappingDefaults().getCascadeStyle();
		}
		for ( String cascade : StringHelper.split( ",", cascades ) ) {
			cascadeStyles.add( CascadeStyle.getCascadeStyle( cascade ) );
			cascadeStyles.add( CascadeStyles.getCascadeStyle( cascade ) );
		}
		return cascadeStyles;
	}

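Most hunks in this merge are the same mechanical migration: the cascade-style constants and helpers (NONE, ALL, getCascadeStyle(), MultipleCascadeStyle, ...) now live on the new CascadeStyles holder, while method signatures keep referring to CascadeStyle. A small before/after sketch of calling code, not taken from the patch:

import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.CascadeStyles;

public final class CascadeStyleMigrationSketch {
	private CascadeStyleMigrationSketch() {
	}

	// old: CascadeStyle.getCascadeStyle( token ), CascadeStyle.NONE, new CascadeStyle.MultipleCascadeStyle(...)
	// new: the same members, reached through CascadeStyles
	public static CascadeStyle parse(String cascades) {
		final String[] tokens = cascades.split( "," );
		if ( tokens.length == 1 ) {
			return CascadeStyles.getCascadeStyle( tokens[0].trim() );
		}
		final CascadeStyle[] styles = new CascadeStyle[tokens.length];
		for ( int i = 0; i < tokens.length; i++ ) {
			styles[i] = CascadeStyles.getCascadeStyle( tokens[i].trim() );
		}
		return new CascadeStyles.MultipleCascadeStyle( styles );
	}
}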
@@ -23,7 +23,8 @@
 */
package org.hibernate.metamodel.spi;

import org.jboss.jandex.IndexResult;
import org.jboss.jandex.IndexView;

import org.hibernate.metamodel.MetadataSources;

@@ -39,5 +40,5 @@ public interface MetadataSourcesContributor {
	 * @param metadataSources
	 * @param jandexIndex The Jandex index
	 */
	public void contribute(MetadataSources metadataSources, IndexResult jandexIndex);
	public void contribute(MetadataSources metadataSources, IndexView jandexIndex);
}

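The signature change above swaps Jandex's IndexResult for the IndexView interface. A hypothetical contributor against the updated contract (class name and mapping resource are invented for illustration):

import org.jboss.jandex.IndexView;

import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataSourcesContributor;

public class ExampleMetadataSourcesContributor implements MetadataSourcesContributor {
	@Override
	public void contribute(MetadataSources metadataSources, IndexView jandexIndex) {
		// register an extra mapping document; the Jandex index is available for
		// implementations that want to inspect annotated classes instead
		metadataSources.addResource( "org/example/ExtraMappings.hbm.xml" );
	}
}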
@ -30,6 +30,7 @@ import org.hibernate.FetchMode;
|
|||
import org.hibernate.engine.FetchStyle;
|
||||
import org.hibernate.engine.FetchTiming;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
|
||||
/**
|
||||
* @author Steve Ebersole
|
||||
|
@ -54,23 +55,23 @@ public abstract class AbstractPluralAttributeAssociationElementBinding
|
|||
public void setCascadeStyles(Iterable<CascadeStyle> cascadeStyles) {
|
||||
List<CascadeStyle> cascadeStyleList = new ArrayList<CascadeStyle>();
|
||||
for ( CascadeStyle style : cascadeStyles ) {
|
||||
if ( style != CascadeStyle.NONE ) {
|
||||
if ( style != CascadeStyles.NONE ) {
|
||||
cascadeStyleList.add( style );
|
||||
}
|
||||
if ( style == CascadeStyle.DELETE_ORPHAN ||
|
||||
style == CascadeStyle.ALL_DELETE_ORPHAN ) {
|
||||
if ( style == CascadeStyles.DELETE_ORPHAN ||
|
||||
style == CascadeStyles.ALL_DELETE_ORPHAN ) {
|
||||
orphanDelete = true;
|
||||
}
|
||||
}
|
||||
|
||||
if ( cascadeStyleList.isEmpty() ) {
|
||||
cascadeStyle = CascadeStyle.NONE;
|
||||
cascadeStyle = CascadeStyles.NONE;
|
||||
}
|
||||
else if ( cascadeStyleList.size() == 1 ) {
|
||||
cascadeStyle = cascadeStyleList.get( 0 );
|
||||
}
|
||||
else {
|
||||
cascadeStyle = new CascadeStyle.MultipleCascadeStyle(
|
||||
cascadeStyle = new CascadeStyles.MultipleCascadeStyle(
|
||||
cascadeStyleList.toArray( new CascadeStyle[ cascadeStyleList.size() ] )
|
||||
);
|
||||
}
|
||||
|
|
|
@ -169,6 +169,26 @@ public abstract class AbstractPluralAttributeBinding extends AbstractAttributeBi
|
|||
return pluralAttributeElementBinding;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FetchTiming getFetchTiming() {
|
||||
return fetchTiming;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFetchTiming(FetchTiming fetchTiming) {
|
||||
this.fetchTiming = fetchTiming;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FetchStyle getFetchStyle() {
|
||||
return fetchStyle;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFetchStyle(FetchStyle fetchStyle) {
|
||||
this.fetchStyle = fetchStyle;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getCustomLoaderName() {
|
||||
return customLoaderName;
|
||||
|
@ -267,16 +287,6 @@ public abstract class AbstractPluralAttributeBinding extends AbstractAttributeBi
|
|||
this.batchSize = batchSize;
|
||||
}
|
||||
@Override
|
||||
public String getReferencedPropertyName() {
|
||||
return referencedPropertyName;
|
||||
|
@ -324,23 +334,4 @@ public abstract class AbstractPluralAttributeBinding extends AbstractAttributeBi
|
|||
return fetchTiming != FetchTiming.IMMEDIATE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FetchTiming getFetchTiming() {
|
||||
return fetchTiming;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFetchTiming(FetchTiming fetchTiming) {
|
||||
this.fetchTiming = fetchTiming;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FetchStyle getFetchStyle() {
|
||||
return fetchStyle;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFetchStyle(FetchStyle fetchStyle) {
|
||||
this.fetchStyle = fetchStyle;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import java.util.Map;
|
|||
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
|
||||
|
||||
/**
|
||||
|
@ -125,18 +126,18 @@ public enum CascadeType {
|
|||
|
||||
private static final Map<CascadeType, CascadeStyle> cascadeTypeToCascadeStyle = new HashMap<CascadeType, CascadeStyle>();
|
||||
static {
|
||||
cascadeTypeToCascadeStyle.put( ALL, CascadeStyle.ALL );
|
||||
cascadeTypeToCascadeStyle.put( ALL_DELETE_ORPHAN, CascadeStyle.ALL_DELETE_ORPHAN );
|
||||
cascadeTypeToCascadeStyle.put( UPDATE, CascadeStyle.UPDATE );
|
||||
cascadeTypeToCascadeStyle.put( PERSIST, CascadeStyle.PERSIST );
|
||||
cascadeTypeToCascadeStyle.put( MERGE, CascadeStyle.MERGE );
|
||||
cascadeTypeToCascadeStyle.put( LOCK, CascadeStyle.LOCK );
|
||||
cascadeTypeToCascadeStyle.put( REFRESH, CascadeStyle.REFRESH );
|
||||
cascadeTypeToCascadeStyle.put( REPLICATE, CascadeStyle.REPLICATE );
|
||||
cascadeTypeToCascadeStyle.put( EVICT, CascadeStyle.EVICT );
|
||||
cascadeTypeToCascadeStyle.put( DELETE, CascadeStyle.DELETE );
|
||||
cascadeTypeToCascadeStyle.put( DELETE_ORPHAN, CascadeStyle.DELETE_ORPHAN );
|
||||
cascadeTypeToCascadeStyle.put( NONE, CascadeStyle.NONE );
|
||||
cascadeTypeToCascadeStyle.put( ALL, CascadeStyles.ALL );
|
||||
cascadeTypeToCascadeStyle.put( ALL_DELETE_ORPHAN, CascadeStyles.ALL_DELETE_ORPHAN );
|
||||
cascadeTypeToCascadeStyle.put( UPDATE, CascadeStyles.UPDATE );
|
||||
cascadeTypeToCascadeStyle.put( PERSIST, CascadeStyles.PERSIST );
|
||||
cascadeTypeToCascadeStyle.put( MERGE, CascadeStyles.MERGE );
|
||||
cascadeTypeToCascadeStyle.put( LOCK, CascadeStyles.LOCK );
|
||||
cascadeTypeToCascadeStyle.put( REFRESH, CascadeStyles.REFRESH );
|
||||
cascadeTypeToCascadeStyle.put( REPLICATE, CascadeStyles.REPLICATE );
|
||||
cascadeTypeToCascadeStyle.put( EVICT, CascadeStyles.EVICT );
|
||||
cascadeTypeToCascadeStyle.put( DELETE, CascadeStyles.DELETE );
|
||||
cascadeTypeToCascadeStyle.put( DELETE_ORPHAN, CascadeStyles.DELETE_ORPHAN );
|
||||
cascadeTypeToCascadeStyle.put( NONE, CascadeStyles.NONE );
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.hibernate.FetchMode;
|
|||
import org.hibernate.engine.FetchStyle;
|
||||
import org.hibernate.engine.FetchTiming;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.metamodel.spi.domain.SingularAttribute;
|
||||
import org.hibernate.metamodel.spi.source.MetaAttributeContext;
|
||||
|
||||
|
@ -124,18 +125,18 @@ public class ManyToOneAttributeBinding
|
|||
public void setCascadeStyles(Iterable<CascadeStyle> cascadeStyles) {
|
||||
List<CascadeStyle> cascadeStyleList = new ArrayList<CascadeStyle>();
|
||||
for ( CascadeStyle style : cascadeStyles ) {
|
||||
if ( style != CascadeStyle.NONE ) {
|
||||
if ( style != CascadeStyles.NONE ) {
|
||||
cascadeStyleList.add( style );
|
||||
}
|
||||
}
|
||||
if ( cascadeStyleList.isEmpty() ) {
|
||||
cascadeStyle = CascadeStyle.NONE;
|
||||
cascadeStyle = CascadeStyles.NONE;
|
||||
}
|
||||
else if ( cascadeStyleList.size() == 1 ) {
|
||||
cascadeStyle = cascadeStyleList.get( 0 );
|
||||
}
|
||||
else {
|
||||
cascadeStyle = new CascadeStyle.MultipleCascadeStyle(
|
||||
cascadeStyle = new CascadeStyles.MultipleCascadeStyle(
|
||||
cascadeStyleList.toArray( new CascadeStyle[cascadeStyleList.size()] )
|
||||
);
|
||||
}
|
||||
|
|
|
@ -64,7 +64,8 @@ import org.hibernate.engine.internal.Versioning;
|
|||
import org.hibernate.engine.jdbc.batch.internal.BasicBatchKey;
|
||||
import org.hibernate.engine.spi.CachedNaturalIdValueSource;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadingAction;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.engine.spi.CascadingActions;
|
||||
import org.hibernate.engine.spi.EntityEntry;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;
|
||||
|
@ -1083,7 +1084,7 @@ public abstract class AbstractEntityPersister
|
|||
}
|
||||
}
|
||||
if ( cascadeStyle == null ) {
|
||||
cascadeStyle = CascadeStyle.NONE;
|
||||
cascadeStyle = CascadeStyles.NONE;
|
||||
}
|
||||
cascades.add( cascadeStyle );
|
||||
|
||||
|
@ -1542,6 +1543,107 @@ public abstract class AbstractEntityPersister
|
|||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Serializable getIdByUniqueKey(Serializable key, String uniquePropertyName, SessionImplementor session) throws HibernateException {
|
||||
if ( LOG.isTraceEnabled() ) {
|
||||
LOG.tracef(
|
||||
"resolving unique key [%s] to identifier for entity [%s]",
|
||||
key,
|
||||
getEntityName()
|
||||
);
|
||||
}
|
||||
|
||||
int propertyIndex = getSubclassPropertyIndex( uniquePropertyName );
|
||||
if ( propertyIndex < 0 ) {
|
||||
throw new HibernateException(
|
||||
"Could not determine Type for property [" + uniquePropertyName + "] on entity [" + getEntityName() + "]"
|
||||
);
|
||||
}
|
||||
Type propertyType = getSubclassPropertyType( propertyIndex );
|
||||
|
||||
try {
|
||||
PreparedStatement ps = session.getTransactionCoordinator()
|
||||
.getJdbcCoordinator()
|
||||
.getStatementPreparer()
|
||||
.prepareStatement( generateIdByUniqueKeySelectString( uniquePropertyName ) );
|
||||
try {
|
||||
propertyType.nullSafeSet( ps, key, 1, session );
|
||||
ResultSet rs = ps.executeQuery();
|
||||
try {
|
||||
//if there is no resulting row, return null
|
||||
if ( !rs.next() ) {
|
||||
return null;
|
||||
}
|
||||
return (Serializable) getIdentifierType().nullSafeGet( rs, getIdentifierAliases(), session, null );
|
||||
}
|
||||
finally {
|
||||
rs.close();
|
||||
}
|
||||
}
|
||||
finally {
|
||||
ps.close();
|
||||
}
|
||||
}
|
||||
catch ( SQLException e ) {
|
||||
throw getFactory().getSQLExceptionHelper().convert(
|
||||
e,
|
||||
String.format(
|
||||
"could not resolve unique property [%s] to identifier for entity [%s]",
|
||||
uniquePropertyName,
|
||||
getEntityName()
|
||||
),
|
||||
getSQLSnapshotSelectString()
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected String generateIdByUniqueKeySelectString(String uniquePropertyName) {
|
||||
Select select = new Select( getFactory().getDialect() );
|
||||
|
||||
if ( getFactory().getSettings().isCommentsEnabled() ) {
|
||||
select.setComment( "resolve id by unique property [" + getEntityName() + "." + uniquePropertyName + "]" );
|
||||
}
|
||||
|
||||
final String rooAlias = getRootAlias();
|
||||
|
||||
select.setFromClause( fromTableFragment( rooAlias ) + fromJoinFragment( rooAlias, true, false ) );
|
||||
|
||||
SelectFragment selectFragment = new SelectFragment();
|
||||
selectFragment.addColumns( rooAlias, getIdentifierColumnNames(), getIdentifierAliases() );
|
||||
select.setSelectClause( selectFragment );
|
||||
|
||||
StringBuilder whereClauseBuffer = new StringBuilder();
|
||||
final int uniquePropertyIndex = getSubclassPropertyIndex( uniquePropertyName );
|
||||
final String uniquePropertyTableAlias = generateTableAlias(
|
||||
rooAlias,
|
||||
getSubclassPropertyTableNumber( uniquePropertyIndex )
|
||||
);
|
||||
String sep = "";
|
||||
for ( String columnTemplate : getSubclassPropertyColumnReaderTemplateClosure()[uniquePropertyIndex] ) {
|
||||
if ( columnTemplate == null ) {
|
||||
continue;
|
||||
}
|
||||
final String columnReference = StringHelper.replace( columnTemplate, Template.TEMPLATE, uniquePropertyTableAlias );
|
||||
whereClauseBuffer.append( sep ).append( columnReference ).append( "=?" );
|
||||
sep = " and ";
|
||||
}
|
||||
for ( String formulaTemplate : getSubclassPropertyFormulaTemplateClosure()[uniquePropertyIndex] ) {
|
||||
if ( formulaTemplate == null ) {
|
||||
continue;
|
||||
}
|
||||
final String formulaReference = StringHelper.replace( formulaTemplate, Template.TEMPLATE, uniquePropertyTableAlias );
|
||||
whereClauseBuffer.append( sep ).append( formulaReference ).append( "=?" );
|
||||
sep = " and ";
|
||||
}
|
||||
whereClauseBuffer.append( whereJoinFragment( rooAlias, true, false ) );
|
||||
|
||||
select.setWhereClause( whereClauseBuffer.toString() );
|
||||
|
||||
return select.setOuterJoins( "", "" ).toStatementString();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate the SQL that selects the version number by id
|
||||
*/
|
||||
|
@@ -3781,11 +3883,11 @@ public abstract class AbstractEntityPersister

		loaders.put(
				"merge",
				new CascadeEntityLoader( this, CascadingAction.MERGE, getFactory() )
				new CascadeEntityLoader( this, CascadingActions.MERGE, getFactory() )
		);
		loaders.put(
				"refresh",
				new CascadeEntityLoader( this, CascadingAction.REFRESH, getFactory() )
				new CascadeEntityLoader( this, CascadingActions.REFRESH, getFactory() )
		);
	}

@@ -509,6 +509,8 @@ public interface EntityPersister extends OptimisticCacheSource {
	public Object[] getDatabaseSnapshot(Serializable id, SessionImplementor session)
	throws HibernateException;

	public Serializable getIdByUniqueKey(Serializable key, String uniquePropertyName, SessionImplementor session);

	/**
	 * Get the current version of the object, or return null if there is no row for
	 * the given identifier. In the case of unversioned data, return any object

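Together with the AbstractEntityPersister additions above, this new contract method resolves an entity identifier from a unique, non-identifier property with a single select. A hypothetical caller, assuming an entity named com.example.User with a unique "email" property (all names invented):

import java.io.Serializable;

import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.persister.entity.EntityPersister;

public class IdByUniqueKeySketch {
	// Returns the identifier of the User row whose unique email column matches, or null if none.
	public static Serializable resolveUserId(SessionImplementor session, String email) {
		final EntityPersister persister = session.getFactory().getEntityPersister( "com.example.User" );
		// Under the hood this runs roughly
		//   select u.id from users u where u.email=?
		// as built by generateIdByUniqueKeySelectString() in the hunk above.
		return persister.getIdByUniqueKey( email, "email", session );
	}
}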
@ -23,14 +23,21 @@
|
|||
*/
|
||||
package org.hibernate.proxy;
|
||||
|
||||
import javax.naming.NamingException;
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.LazyInitializationException;
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.SessionException;
|
||||
import org.hibernate.TransientObjectException;
|
||||
import org.hibernate.cfg.AvailableSettings;
|
||||
import org.hibernate.engine.spi.EntityKey;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.internal.SessionFactoryRegistry;
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
|
||||
/**
|
||||
|
@ -41,6 +48,8 @@ import org.hibernate.persister.entity.EntityPersister;
|
|||
* @author Gavin King
|
||||
*/
|
||||
public abstract class AbstractLazyInitializer implements LazyInitializer {
|
||||
private static final Logger log = Logger.getLogger( AbstractLazyInitializer.class );
|
||||
|
||||
private String entityName;
|
||||
private Serializable id;
|
||||
private Object target;
|
||||
|
@ -50,6 +59,9 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
private transient SessionImplementor session;
|
||||
private Boolean readOnlyBeforeAttachedToSession;
|
||||
|
||||
private String sessionFactoryUuid;
|
||||
private boolean specjLazyLoad = false;
|
||||
|
||||
/**
|
||||
* For serialization from the non-pojo initializers (HHH-3309)
|
||||
*/
|
||||
|
@ -104,12 +116,12 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
public final void setSession(SessionImplementor s) throws HibernateException {
|
||||
if ( s != session ) {
|
||||
// check for s == null first, since it is least expensive
|
||||
if ( s == null ){
|
||||
if ( s == null ) {
|
||||
unsetSession();
|
||||
}
|
||||
else if ( isConnectedToSession() ) {
|
||||
//TODO: perhaps this should be some other RuntimeException...
|
||||
throw new HibernateException("illegally attempted to associate a proxy with two open Sessions");
|
||||
throw new HibernateException( "illegally attempted to associate a proxy with two open Sessions" );
|
||||
}
|
||||
else {
|
||||
// s != null
|
||||
|
@ -117,7 +129,7 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
if ( readOnlyBeforeAttachedToSession == null ) {
|
||||
// use the default read-only/modifiable setting
|
||||
final EntityPersister persister = s.getFactory().getEntityPersister( entityName );
|
||||
setReadOnly( s.getPersistenceContext().isDefaultReadOnly() || ! persister.isMutable() );
|
||||
setReadOnly( s.getPersistenceContext().isDefaultReadOnly() || !persister.isMutable() );
|
||||
}
|
||||
else {
|
||||
// use the read-only/modifiable setting indicated during deserialization
|
||||
|
@ -137,6 +149,7 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
|
||||
@Override
|
||||
public final void unsetSession() {
|
||||
prepareForPossibleSpecialSpecjInitialization();
|
||||
session = null;
|
||||
readOnly = false;
|
||||
readOnlyBeforeAttachedToSession = null;
|
||||
|
@ -144,18 +157,21 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
|
||||
@Override
|
||||
public final void initialize() throws HibernateException {
|
||||
if (!initialized) {
|
||||
if ( session==null ) {
|
||||
throw new LazyInitializationException("could not initialize proxy - no Session");
|
||||
if ( !initialized ) {
|
||||
if ( specjLazyLoad ) {
|
||||
specialSpecjInitialization();
|
||||
}
|
||||
else if ( session == null ) {
|
||||
throw new LazyInitializationException( "could not initialize proxy - no Session" );
|
||||
}
|
||||
else if ( !session.isOpen() ) {
|
||||
throw new LazyInitializationException("could not initialize proxy - the owning Session was closed");
|
||||
throw new LazyInitializationException( "could not initialize proxy - the owning Session was closed" );
|
||||
}
|
||||
else if ( !session.isConnected() ) {
|
||||
throw new LazyInitializationException("could not initialize proxy - the owning Session is disconnected");
|
||||
throw new LazyInitializationException( "could not initialize proxy - the owning Session is disconnected" );
|
||||
}
|
||||
else {
|
||||
target = session.immediateLoad(entityName, id);
|
||||
target = session.immediateLoad( entityName, id );
|
||||
initialized = true;
|
||||
checkTargetState();
|
||||
}
|
||||
|
@ -165,6 +181,67 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
}
|
||||
}
|
||||
|
||||
protected void specialSpecjInitialization() {
|
||||
if ( session == null ) {
|
||||
//we have a detached collection thats set to null, reattach
|
||||
if ( sessionFactoryUuid == null ) {
|
||||
throw new LazyInitializationException( "could not initialize proxy - no Session" );
|
||||
}
|
||||
try {
|
||||
SessionFactoryImplementor sf = (SessionFactoryImplementor)
|
||||
SessionFactoryRegistry.INSTANCE.getSessionFactory( sessionFactoryUuid );
|
||||
SessionImplementor session = (SessionImplementor) sf.openSession();
|
||||
|
||||
try {
|
||||
target = session.immediateLoad( entityName, id );
|
||||
}
|
||||
finally {
|
||||
// make sure the just opened temp session gets closed!
|
||||
try {
|
||||
( (Session) session ).close();
|
||||
}
|
||||
catch (Exception e) {
|
||||
log.warn( "Unable to close temporary session used to load lazy proxy associated to no session" );
|
||||
}
|
||||
}
|
||||
initialized = true;
|
||||
checkTargetState();
|
||||
}
|
||||
catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
throw new LazyInitializationException( e.getMessage() );
|
||||
}
|
||||
}
|
||||
else if ( session.isOpen() && session.isConnected() ) {
|
||||
target = session.immediateLoad( entityName, id );
|
||||
initialized = true;
|
||||
checkTargetState();
|
||||
}
|
||||
else {
|
||||
throw new LazyInitializationException( "could not initialize proxy - Session was closed or disconnected" );
|
||||
}
|
||||
}
|
||||
|
||||
protected void prepareForPossibleSpecialSpecjInitialization() {
|
||||
if ( session != null ) {
|
||||
specjLazyLoad =
|
||||
Boolean.parseBoolean(
|
||||
session.getFactory()
|
||||
.getProperties()
|
||||
.getProperty( AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS )
|
||||
);
|
||||
|
||||
if ( specjLazyLoad && sessionFactoryUuid == null ) {
|
||||
try {
|
||||
sessionFactoryUuid = (String) session.getFactory().getReference().get( "uuid" ).getContent();
|
||||
}
|
||||
catch (NamingException e) {
|
||||
//not much we can do if this fails...
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void checkTargetState() {
|
||||
if ( !unwrap ) {
|
||||
if ( target == null ) {
|
||||
|
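The specjLazyLoad path added above is switched on by AvailableSettings.ENABLE_LAZY_LOAD_NO_TRANS: when a proxy is detached, prepareForPossibleSpecialSpecjInitialization() records the owning factory's UUID, and a later access goes through specialSpecjInitialization(), which opens a temporary session from SessionFactoryRegistry, loads the target and closes that session again. A hedged configuration sketch (the property string is assumed to be "hibernate.enable_lazy_load_no_trans"):

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class LazyLoadNoTransSketch {
	public static SessionFactory build() {
		// With this flag on, touching a lazy proxy after its Session closed no longer throws
		// LazyInitializationException; the proxy loads itself through a short-lived temporary session.
		return new Configuration()
				.setProperty( "hibernate.enable_lazy_load_no_trans", "true" )
				.configure()
				.buildSessionFactory();
	}
}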
@ -205,7 +282,7 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
@Override
|
||||
public final Object getImplementation(SessionImplementor s) throws HibernateException {
|
||||
final EntityKey entityKey = generateEntityKeyOrNull( getIdentifier(), s, getEntityName() );
|
||||
return ( entityKey == null ? null : s.getPersistenceContext().getEntity( entityKey ) );
|
||||
return (entityKey == null ? null : s.getPersistenceContext().getEntity( entityKey ));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -221,17 +298,19 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
|
||||
@Override
|
||||
public final boolean isReadOnlySettingAvailable() {
|
||||
return ( session != null && ! session.isClosed() );
|
||||
return (session != null && !session.isClosed());
|
||||
}
|
||||
|
||||
private void errorIfReadOnlySettingNotAvailable() {
|
||||
if ( session == null ) {
|
||||
throw new TransientObjectException(
|
||||
"Proxy is detached (i.e, session is null). The read-only/modifiable setting is only accessible when the proxy is associated with an open session." );
|
||||
"Proxy is detached (i.e, session is null). The read-only/modifiable setting is only accessible when the proxy is associated with an open session."
|
||||
);
|
||||
}
|
||||
if ( session.isClosed() ) {
|
||||
throw new SessionException(
|
||||
"Session is closed. The read-only/modifiable setting is only accessible when the proxy is associated with an open session." );
|
||||
"Session is closed. The read-only/modifiable setting is only accessible when the proxy is associated with an open session."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -247,8 +326,8 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
// only update if readOnly is different from current setting
|
||||
if ( this.readOnly != readOnly ) {
|
||||
final EntityPersister persister = session.getFactory().getEntityPersister( entityName );
|
||||
if ( ! persister.isMutable() && ! readOnly ) {
|
||||
throw new IllegalStateException( "cannot make proxies for immutable entities modifiable");
|
||||
if ( !persister.isMutable() && !readOnly ) {
|
||||
throw new IllegalStateException( "cannot make proxies for immutable entities modifiable" );
|
||||
}
|
||||
this.readOnly = readOnly;
|
||||
if ( initialized ) {
|
||||
|
@ -263,13 +342,14 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
/**
|
||||
* Get the read-only/modifiable setting that should be put in affect when it is
|
||||
* attached to a session.
|
||||
*
|
||||
* <p/>
|
||||
* This method should only be called during serialization when read-only/modifiable setting
|
||||
* is not available (i.e., isReadOnlySettingAvailable() == false)
|
||||
*
|
||||
* @return null, if the default setting should be used;
|
||||
* true, for read-only;
|
||||
* false, for modifiable
|
||||
* true, for read-only;
|
||||
* false, for modifiable
|
||||
*
|
||||
* @throws IllegalStateException if isReadOnlySettingAvailable() == true
|
||||
*/
|
||||
protected final Boolean isReadOnlyBeforeAttachedToSession() {
|
||||
|
@ -284,12 +364,13 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
|
|||
/**
|
||||
* Set the read-only/modifiable setting that should be put in affect when it is
|
||||
* attached to a session.
|
||||
*
|
||||
* <p/>
|
||||
* This method should only be called during deserialization, before associating
|
||||
* the proxy with a session.
|
||||
*
|
||||
* @param readOnlyBeforeAttachedToSession, the read-only/modifiable setting to use when
|
||||
* associated with a session; null indicates that the default should be used.
|
||||
*
|
||||
* @throws IllegalStateException if isReadOnlySettingAvailable() == true
|
||||
*/
|
||||
/* package-private */
|
||||
|
|
|
@ -52,6 +52,7 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
|
|||
|
||||
private final ClassLoader classClassLoader;
|
||||
private final ClassLoader resourcesClassLoader;
|
||||
private final ClassLoader serviceLoaderClassLoader;
|
||||
|
||||
private final StrategyInstanceResolverImpl strategyInstanceResolver = new StrategyInstanceResolverImpl( this );
|
||||
|
||||
|
@ -108,6 +109,8 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
|
|||
};
|
||||
|
||||
this.resourcesClassLoader = resourcesClassLoader;
|
||||
|
||||
this.serviceLoaderClassLoader = buildServiceLoaderClassLoader();
|
||||
}
|
||||
|
||||
@SuppressWarnings( {"UnusedDeclaration"})
|
||||
|
@ -138,6 +141,68 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
|
|||
}
|
||||
}
|
||||
|
||||
private ClassLoader buildServiceLoaderClassLoader() {
|
||||
return new ClassLoader(null) {
|
||||
final ClassLoader[] classLoaderArray = new ClassLoader[] {
|
||||
// first look on the hibernate class loader
|
||||
getClass().getClassLoader(),
|
||||
// next look on the resource class loader
|
||||
resourcesClassLoader,
|
||||
// finally look on the combined class class loader
|
||||
classClassLoader
|
||||
};
|
||||
|
||||
@Override
|
||||
public Enumeration<URL> getResources(String name) throws IOException {
|
||||
final HashSet<URL> resourceUrls = new HashSet<URL>();
|
||||
|
||||
for ( ClassLoader classLoader : classLoaderArray ) {
|
||||
final Enumeration<URL> urls = classLoader.getResources( name );
|
||||
while ( urls.hasMoreElements() ) {
|
||||
resourceUrls.add( urls.nextElement() );
|
||||
}
|
||||
}
|
||||
|
||||
return new Enumeration<URL>() {
|
||||
final Iterator<URL> resourceUrlIterator = resourceUrls.iterator();
|
||||
@Override
|
||||
public boolean hasMoreElements() {
|
||||
return resourceUrlIterator.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public URL nextElement() {
|
||||
return resourceUrlIterator.next();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
protected URL findResource(String name) {
|
||||
for ( ClassLoader classLoader : classLoaderArray ) {
|
||||
final URL resource = classLoader.getResource( name );
|
||||
if ( resource != null ) {
|
||||
return resource;
|
||||
}
|
||||
}
|
||||
return super.findResource( name );
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Class<?> findClass(String name) throws ClassNotFoundException {
|
||||
for ( ClassLoader classLoader : classLoaderArray ) {
|
||||
try {
|
||||
return classLoader.loadClass( name );
|
||||
}
|
||||
catch (Exception ignore) {
|
||||
}
|
||||
}
|
||||
|
||||
throw new ClassNotFoundException( "Could not load requested class : " + name );
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings( {"unchecked"})
|
||||
public <T> Class<T> classForName(String className) {
|
||||
|
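buildServiceLoaderClassLoader() above exists so that java.util.ServiceLoader can see META-INF/services entries and classes from the Hibernate, resource and application class loaders at once. A reduced sketch of the same aggregation idea, independent of Hibernate's own classes:

import java.io.IOException;
import java.net.URL;
import java.util.Collections;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.ServiceLoader;
import java.util.Set;

// Minimal aggregating ClassLoader: ask several delegates in order, merge getResources() results.
public class AggregatingClassLoaderSketch extends ClassLoader {
	private final ClassLoader[] delegates;

	public AggregatingClassLoaderSketch(ClassLoader... delegates) {
		super( null ); // no parent: the delegates below do all the work
		this.delegates = delegates;
	}

	@Override
	public Enumeration<URL> getResources(String name) throws IOException {
		final Set<URL> urls = new LinkedHashSet<URL>();
		for ( ClassLoader delegate : delegates ) {
			urls.addAll( Collections.list( delegate.getResources( name ) ) );
		}
		return Collections.enumeration( urls );
	}

	@Override
	protected Class<?> findClass(String name) throws ClassNotFoundException {
		for ( ClassLoader delegate : delegates ) {
			try {
				return delegate.loadClass( name );
			}
			catch (ClassNotFoundException ignore) {
			}
		}
		throw new ClassNotFoundException( name );
	}

	public static <S> Iterable<S> loadServices(Class<S> contract, ClassLoader... delegates) {
		// ServiceLoader reads getResources("META-INF/services/<contract>") and then loads each
		// listed implementation through this same aggregating loader.
		return ServiceLoader.load( contract, new AggregatingClassLoaderSketch( delegates ) );
	}
}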
@ -230,55 +295,6 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
|
|||
|
||||
@Override
|
||||
public <S> LinkedHashSet<S> loadJavaServices(Class<S> serviceContract) {
|
||||
final ClassLoader serviceLoaderClassLoader = new ClassLoader(null) {
|
||||
final ClassLoader[] classLoaderArray = new ClassLoader[] {
|
||||
// first look on the hibernate class loader
|
||||
getClass().getClassLoader(),
|
||||
// next look on the resource class loader
|
||||
resourcesClassLoader,
|
||||
// finally look on the combined class class loader
|
||||
classClassLoader
|
||||
};
|
||||
|
||||
@Override
|
||||
public Enumeration<URL> getResources(String name) throws IOException {
|
||||
final HashSet<URL> resourceUrls = new HashSet<URL>();
|
||||
|
||||
for ( ClassLoader classLoader : classLoaderArray ) {
|
||||
final Enumeration<URL> urls = classLoader.getResources( name );
|
||||
while ( urls.hasMoreElements() ) {
|
||||
resourceUrls.add( urls.nextElement() );
|
||||
}
|
||||
}
|
||||
|
||||
return new Enumeration<URL>() {
|
||||
final Iterator<URL> resourceUrlIterator = resourceUrls.iterator();
|
||||
@Override
|
||||
public boolean hasMoreElements() {
|
||||
return resourceUrlIterator.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public URL nextElement() {
|
||||
return resourceUrlIterator.next();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Class<?> findClass(String name) throws ClassNotFoundException {
|
||||
for ( ClassLoader classLoader : classLoaderArray ) {
|
||||
try {
|
||||
return classLoader.loadClass( name );
|
||||
}
|
||||
catch (Exception ignore) {
|
||||
}
|
||||
}
|
||||
|
||||
throw new ClassNotFoundException( "Could not load requested class : " + name );
|
||||
}
|
||||
};
|
||||
|
||||
final ServiceLoader<S> loader = ServiceLoader.load( serviceContract, serviceLoaderClassLoader );
|
||||
final LinkedHashSet<S> services = new LinkedHashSet<S>();
|
||||
for ( S service : loader ) {
|
||||
|
@ -288,6 +304,37 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
|
|||
return services;
|
||||
}
|
||||
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
// completely temporary !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
||||
public static interface Work<T> {
|
||||
public T perform();
|
||||
}
|
||||
|
||||
public <T> T withTccl(Work<T> work) {
|
||||
final ClassLoader tccl = Thread.currentThread().getContextClassLoader();
|
||||
|
||||
boolean set = false;
|
||||
|
||||
try {
|
||||
Thread.currentThread().setContextClassLoader( serviceLoaderClassLoader);
|
||||
set = true;
|
||||
}
|
||||
catch (Exception ignore) {
|
||||
}
|
||||
|
||||
try {
|
||||
return work.perform();
|
||||
}
|
||||
finally {
|
||||
if ( set ) {
|
||||
Thread.currentThread().setContextClassLoader( tccl );
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StrategyInstanceResolver getStrategyInstanceResolver() {
|
||||
return strategyInstanceResolver;
|
||||
|
|
|
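The (temporary, per the comment above) withTccl() helper swaps the thread-context class loader to the aggregated service-loader class loader around a unit of work and restores it afterwards. A hypothetical use, assuming a no-arg ClassLoaderServiceImpl constructor and the current package (org.hibernate.service.classloading.internal); the work performed inside is arbitrary:

import org.hibernate.service.classloading.internal.ClassLoaderServiceImpl;

public class WithTcclSketch {
	public static void main(String[] args) {
		final ClassLoaderServiceImpl classLoaderService = new ClassLoaderServiceImpl();

		final String loaderName = classLoaderService.withTccl( new ClassLoaderServiceImpl.Work<String>() {
			@Override
			public String perform() {
				// code here sees the aggregated class loader as the thread-context class loader,
				// which matters for libraries that only consult the TCCL
				return String.valueOf( Thread.currentThread().getContextClassLoader() );
			}
		} );

		System.out.println( loaderName );
	}
}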
@ -1,7 +1,7 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2011, Red Hat Inc. or third-party contributors as
|
||||
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Inc.
|
||||
|
@ -56,8 +56,8 @@ public interface JdbcConnectionAccess extends Serializable {
|
|||
* Does the underlying provider of connections support aggressive releasing of connections (and re-acquisition
|
||||
* of those connections later, if need be) in JTA environments?
|
||||
*
|
||||
* @see ConnectionProvider#supportsAggressiveRelease()
|
||||
* @see MultiTenantConnectionProvider#supportsAggressiveRelease()
|
||||
* @see org.hibernate.service.jdbc.connections.spi.ConnectionProvider#supportsAggressiveRelease()
|
||||
* @see org.hibernate.service.jdbc.connections.spi.MultiTenantConnectionProvider#supportsAggressiveRelease()
|
||||
*/
|
||||
public boolean supportsAggressiveRelease();
|
||||
}
|
||||
|
|
|
@ -1,117 +0,0 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
package org.hibernate.service.jdbc.env.internal;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.hibernate.dialect.Dialect;
|
||||
import org.hibernate.engine.jdbc.internal.TypeInfo;
|
||||
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
|
||||
import org.hibernate.metamodel.spi.relational.Identifier;
|
||||
import org.hibernate.service.Service;
|
||||
import org.hibernate.service.jdbc.env.spi.ExtractedDatabaseMetaData;
|
||||
import org.hibernate.service.jdbc.env.spi.IdentifierHelper;
|
||||
import org.hibernate.service.jdbc.env.spi.QualifiedObjectNameSupport;
|
||||
|
||||
/**
|
||||
* Initial look at this concept we keep talking about with merging information from {@link java.sql.DatabaseMetaData}
|
||||
* and {@link org.hibernate.dialect.Dialect}
|
||||
*
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public interface JdbcEnvironment extends Service {
|
||||
/**
|
||||
* Get the dialect for this environment.
|
||||
*
|
||||
* @return The dialect.
|
||||
*/
|
||||
public Dialect getDialect();
|
||||
|
||||
/**
|
||||
* Access to the bits of information we pulled off the JDBC {@link java.sql.DatabaseMetaData} (that did not get
|
||||
* "interpreted" into the helpers/delegates available here).
|
||||
*
|
||||
* @return The values extracted from JDBC DatabaseMetaData
|
||||
*/
|
||||
public ExtractedDatabaseMetaData getExtractedDatabaseMetaData();
|
||||
|
||||
/**
|
||||
* Get the current database catalog. Typically will come from either {@link java.sql.Connection#getCatalog()}
|
||||
* or {@link org.hibernate.cfg.AvailableSettings#DEFAULT_CATALOG}.
|
||||
*
|
||||
* @return The current catalog.
|
||||
*/
|
||||
public Identifier getCurrentCatalog();
|
||||
|
||||
/**
|
||||
* Get the current database catalog. Typically will come from either
|
||||
* {@link org.hibernate.service.jdbc.env.spi.SchemaNameResolver#resolveSchemaName(java.sql.Connection)} or
|
||||
* {@link org.hibernate.cfg.AvailableSettings#DEFAULT_CATALOG}.
|
||||
*
|
||||
* @return The current schema
|
||||
*/
|
||||
public Identifier getCurrentSchema();
|
||||
|
||||
/**
|
||||
* Obtain support for reading and writing qualified object names.
|
||||
*
|
||||
* @return Qualified name support.
|
||||
*/
|
||||
public QualifiedObjectNameSupport getQualifiedObjectNameSupport();
|
||||
|
||||
/**
|
||||
* Obtain the helper for dealing with identifiers in this environment.
|
||||
*
|
||||
* @return The identifier helper.
|
||||
*/
|
||||
public IdentifierHelper getIdentifierHelper();
|
||||
|
||||
/**
|
||||
* Get the complete set of reserved words for this environment. These are significant because they represent
|
||||
* the complete set of terms that MUST BE quoted if used as identifiers. This allows us to apply auto-quoting
|
||||
* in the metamodel based on these terms.
|
||||
*
|
||||
* Note that the standard IdentifierHelper returned by {@link #getIdentifierHelper()} already accounts for
|
||||
* auto-quoting :) yaay!
|
||||
*
|
||||
* @return Reserved words
|
||||
*/
|
||||
public Set<String> getReservedWords();
|
||||
|
||||
/**
|
||||
* Obtain the helper for dealing with JDBC {@link java.sql.SQLException} faults.
|
||||
*
|
||||
* @return This environment's helper.
|
||||
*/
|
||||
public SqlExceptionHelper getSqlExceptionHelper();
|
||||
|
||||
/**
|
||||
* Find type information for the type identified by the given "JDBC type code".
|
||||
*
|
||||
* @param jdbcTypeCode The JDBC type code.
|
||||
*
|
||||
* @return The corresponding type info.
|
||||
*/
|
||||
public TypeInfo getTypeInfoForJdbcCode(int jdbcTypeCode);
|
||||
}
|
|
@@ -151,6 +151,11 @@ public class Select {
		return this;
	}

	public Select setSelectClause(SelectFragment selectFragment) {
		setSelectClause( selectFragment.toFragmentString().substring( 2 ) );
		return this;
	}

	/**
	 * Sets the whereClause.
	 * @param whereClause The whereClause to set

@@ -204,5 +209,4 @@ public class Select {
		LockOptions.copy(lockOptions, this.lockOptions);
		return this;
	}

}

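The new setSelectClause(SelectFragment) overload above strips the leading ", " that SelectFragment.toFragmentString() emits and installs the rest as the select list. A quick usage sketch; table, column and alias names are placeholders, and the commented result is only approximate:

import org.hibernate.dialect.H2Dialect;
import org.hibernate.sql.Select;
import org.hibernate.sql.SelectFragment;

public class SelectFragmentUsageSketch {
	public static String buildIdByUniqueColumnSelect() {
		final SelectFragment columns = new SelectFragment()
				.addColumns( "u", new String[] { "ID" }, new String[] { "id_" } );

		final Select select = new Select( new H2Dialect() );
		select.setSelectClause( columns );   // the new overload added above
		select.setFromClause( "USERS u" );
		select.setWhereClause( "u.USER_NAME=?" );
		select.setOuterJoins( "", "" );

		return select.toStatementString();   // roughly: select u.ID as id_ from USERS u where u.USER_NAME=?
	}
}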
@ -30,6 +30,7 @@ import org.hibernate.FetchMode;
|
|||
import org.hibernate.PropertyNotFoundException;
|
||||
import org.hibernate.engine.internal.UnsavedValueFactory;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.engine.spi.IdentifierValue;
|
||||
import org.hibernate.engine.spi.VersionValue;
|
||||
import org.hibernate.id.IdentifierGenerator;
|
||||
|
@ -212,7 +213,7 @@ public class PropertyFactory {
|
|||
|
||||
final CascadeStyle cascadeStyle = property.isAssociation()
|
||||
? ( (SingularAssociationAttributeBinding) property ).getCascadeStyle()
|
||||
: CascadeStyle.NONE;
|
||||
: CascadeStyles.NONE;
|
||||
|
||||
return new VersionProperty(
|
||||
property.getAttribute().getName(),
|
||||
|
@ -296,7 +297,7 @@ public class PropertyFactory {
|
|||
final SingularAttributeBinding singularAttributeBinding = ( SingularAttributeBinding ) property;
|
||||
final CascadeStyle cascadeStyle = singularAttributeBinding.isAssociation()
|
||||
? ( (SingularAssociationAttributeBinding) singularAttributeBinding ).getCascadeStyle()
|
||||
: CascadeStyle.NONE;
|
||||
: CascadeStyles.NONE;
|
||||
final FetchMode fetchMode = singularAttributeBinding.isAssociation()
|
||||
? ( (SingularAssociationAttributeBinding) singularAttributeBinding ).getFetchMode()
|
||||
: FetchMode.DEFAULT;
|
||||
|
@ -325,7 +326,7 @@ public class PropertyFactory {
|
|||
final AbstractPluralAttributeBinding pluralAttributeBinding = (AbstractPluralAttributeBinding) property;
|
||||
final CascadeStyle cascadeStyle = pluralAttributeBinding.isAssociation()
|
||||
? ( (PluralAttributeAssociationElementBinding) pluralAttributeBinding.getPluralAttributeElementBinding() ).getCascadeStyle()
|
||||
: CascadeStyle.NONE;
|
||||
: CascadeStyles.NONE;
|
||||
final FetchMode fetchMode = pluralAttributeBinding.isAssociation()
|
||||
? pluralAttributeBinding.getFetchMode()
|
||||
: FetchMode.DEFAULT;
|
||||
|
|
|
@ -42,6 +42,7 @@ import org.hibernate.cfg.Environment;
|
|||
import org.hibernate.engine.OptimisticLockStyle;
|
||||
import org.hibernate.engine.internal.Versioning;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.ValueInclusion;
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
|
@ -234,7 +235,7 @@ public class EntityMetamodel implements Serializable {
|
|||
hasLazy = true;
|
||||
}
|
||||
|
||||
if ( properties[i].getCascadeStyle() != CascadeStyle.NONE ) {
|
||||
if ( properties[i].getCascadeStyle() != CascadeStyles.NONE ) {
|
||||
foundCascade = true;
|
||||
}
|
||||
|
||||
|
@ -484,7 +485,7 @@ public class EntityMetamodel implements Serializable {
|
|||
hasLazy = true;
|
||||
}
|
||||
|
||||
if ( properties[i].getCascadeStyle() != CascadeStyle.NONE ) {
|
||||
if ( properties[i].getCascadeStyle() != CascadeStyles.NONE ) {
|
||||
foundCascade = true;
|
||||
}
|
||||
|
||||
|
|
|
@ -40,6 +40,7 @@ import org.hibernate.MappingException;
|
|||
import org.hibernate.TransientObjectException;
|
||||
import org.hibernate.engine.internal.ForeignKeys;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.engine.spi.Mapping;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
|
@ -270,7 +271,7 @@ public class AnyType extends AbstractType implements CompositeType, AssociationT
|
|||
}
|
||||
}
|
||||
public CascadeStyle getCascadeStyle(int i) {
|
||||
return CascadeStyle.NONE;
|
||||
return CascadeStyles.NONE;
|
||||
}
|
||||
|
||||
public FetchMode getFetchMode(int i) {
|
||||
|
|
|
@ -38,6 +38,7 @@ import org.hibernate.FetchMode;
|
|||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.engine.spi.CascadeStyle;
|
||||
import org.hibernate.engine.spi.CascadeStyles;
|
||||
import org.hibernate.engine.spi.Mapping;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
|
@ -121,7 +122,7 @@ public class CompositeCustomType extends AbstractType implements CompositeType,
|
|||
}
|
||||
|
||||
public CascadeStyle getCascadeStyle(int i) {
|
||||
return CascadeStyle.NONE;
|
||||
return CascadeStyles.NONE;
|
||||
}
|
||||
|
||||
public FetchMode getFetchMode(int i) {
|
||||
|
|
|
@ -24,20 +24,22 @@
|
|||
package org.hibernate.type;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Types;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import javax.persistence.Enumerated;
|
||||
import javax.persistence.MapKeyEnumerated;
|
||||
import org.hibernate.AssertionFailure;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.internal.util.ReflectHelper;
|
||||
import org.hibernate.usertype.DynamicParameterizedType;
|
||||
import org.hibernate.usertype.EnhancedUserType;
|
||||
import org.hibernate.usertype.ParameterizedType;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
/**
|
||||
* Enum type mapper
|
||||
|
@ -49,7 +51,7 @@ import org.hibernate.usertype.ParameterizedType;
|
|||
* @author Hardy Ferentschik
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public class EnumType implements EnhancedUserType, ParameterizedType, Serializable {
|
||||
public class EnumType implements EnhancedUserType, DynamicParameterizedType, Serializable {
|
||||
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, EnumType.class.getName());
|
||||
|
||||
|
@ -168,17 +170,37 @@ public class EnumType implements EnhancedUserType, ParameterizedType, Serializab
|
|||
}
|
||||
|
||||
public void setParameterValues(Properties parameters) {
|
||||
String enumClassName = parameters.getProperty( ENUM );
|
||||
try {
|
||||
enumClass = ReflectHelper.classForName( enumClassName, this.getClass() ).asSubclass( Enum.class );
|
||||
}
|
||||
catch ( ClassNotFoundException exception ) {
|
||||
throw new HibernateException( "Enum class not found", exception );
|
||||
}
|
||||
ParameterType reader = (ParameterType) parameters.get( PARAMETER_TYPE );
|
||||
|
||||
String type = parameters.getProperty( TYPE );
|
||||
if ( type != null ) {
|
||||
sqlType = Integer.decode( type );
|
||||
if ( reader != null ) {
|
||||
enumClass = reader.getReturnedClass().asSubclass( Enum.class );
|
||||
|
||||
javax.persistence.EnumType enumType = getEnumType( reader );
|
||||
if ( enumType != null ) {
|
||||
if ( javax.persistence.EnumType.ORDINAL.equals( enumType ) ) {
|
||||
sqlType = Types.INTEGER;
|
||||
}
|
||||
else if ( javax.persistence.EnumType.STRING.equals( enumType ) ) {
|
||||
sqlType = Types.VARCHAR;
|
||||
}
|
||||
else {
|
||||
throw new AssertionFailure( "Unknown EnumType: " + enumType );
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
String enumClassName = (String) parameters.get( ENUM );
|
||||
try {
|
||||
enumClass = ReflectHelper.classForName( enumClassName, this.getClass() ).asSubclass( Enum.class );
|
||||
}
|
||||
catch ( ClassNotFoundException exception ) {
|
||||
throw new HibernateException( "Enum class not found", exception );
|
||||
}
|
||||
|
||||
String type = (String) parameters.get( TYPE );
|
||||
if ( type != null ) {
|
||||
sqlType = Integer.decode( type );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -234,4 +256,30 @@ public class EnumType implements EnhancedUserType, ParameterizedType, Serializab
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
private javax.persistence.EnumType getEnumType(ParameterType reader) {
|
||||
javax.persistence.EnumType enumType = null;
|
||||
if ( reader.isPrimaryKey() ) {
|
||||
MapKeyEnumerated enumAnn = getAnnotation( reader.getAnnotationsMethod(), MapKeyEnumerated.class );
|
||||
if ( enumAnn != null ) {
|
||||
enumType = enumAnn.value();
|
||||
}
|
||||
}
|
||||
else {
|
||||
Enumerated enumAnn = getAnnotation( reader.getAnnotationsMethod(), Enumerated.class );
|
||||
if ( enumAnn != null ) {
|
||||
enumType = enumAnn.value();
|
||||
}
|
||||
}
|
||||
return enumType;
|
||||
}
|
||||
|
||||
private <T extends Annotation> T getAnnotation(Annotation[] annotations, Class<T> anClass) {
|
||||
for ( Annotation annotation : annotations ) {
|
||||
if ( anClass.isInstance( annotation ) ) {
|
||||
return (T) annotation;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
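With EnumType now implementing DynamicParameterizedType, the JDBC type is derived from the property's own annotations: javax.persistence.EnumType.STRING resolves to Types.VARCHAR and ORDINAL to Types.INTEGER, per the new setParameterValues() branch above. A typical mapping exercising that path (entity and enum are invented):

import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Id;

@Entity
public class Invoice {
	public enum Status { OPEN, PAID, CANCELLED }

	@Id
	private Long id;

	// Read through ParameterType.getAnnotationsMethod() in org.hibernate.type.EnumType:
	// STRING here resolves the column to Types.VARCHAR, ORDINAL would resolve it to Types.INTEGER.
	@Enumerated(EnumType.STRING)
	private Status status;
}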
@@ -0,0 +1,45 @@
package org.hibernate.usertype;

import java.lang.annotation.Annotation;

/**
 * Types that implement this interface receive, in {@code setParameterValues}, an instance of
 * {@code DynamicParameterizedType$ParameterType} stored under the key
 * PARAMETER_TYPE = "org.hibernate.type.ParameterType".
 *
 * The ParameterType interface exposes the information needed to build the type dynamically.
 *
 * @author Janario Oliveira
 */
public interface DynamicParameterizedType extends ParameterizedType {
	public static final String PARAMETER_TYPE = "org.hibernate.type.ParameterType";

	public static final String IS_DYNAMIC = "org.hibernate.type.ParameterType.dynamic";

	public static final String RETURNED_CLASS = "org.hibernate.type.ParameterType.returnedClass";
	public static final String IS_PRIMARY_KEY = "org.hibernate.type.ParameterType.primaryKey";
	public static final String ENTITY = "org.hibernate.type.ParameterType.entityClass";
	public static final String PROPERTY = "org.hibernate.type.ParameterType.propertyName";
	public static final String ACCESS_TYPE = "org.hibernate.type.ParameterType.accessType";

	public static interface ParameterType {

		public Class getReturnedClass();

		public Annotation[] getAnnotationsMethod();

		public String getCatalog();

		public String getSchema();

		public String getTable();

		public boolean isPrimaryKey();

		public String[] getColumns();

	}
}

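Custom types can opt into the same mechanism: implement DynamicParameterizedType and read the ParameterType object that Hibernate stores in the properties under PARAMETER_TYPE. A sketch of the parameter-reading half of such a type; a real implementation would also implement org.hibernate.usertype.UserType, and the field uses here are invented:

import java.util.Properties;

import org.hibernate.usertype.DynamicParameterizedType;

public class AuditedColumnTypeSketch implements DynamicParameterizedType {
	private Class<?> mappedClass;
	private String qualifiedColumn;

	@Override
	public void setParameterValues(Properties parameters) {
		final ParameterType parameterType = (ParameterType) parameters.get( PARAMETER_TYPE );
		if ( parameterType != null ) {
			// static model information gathered by the binder for the annotated property
			mappedClass = parameterType.getReturnedClass();
			qualifiedColumn = parameterType.getTable() + "." + parameterType.getColumns()[0];
		}
		else {
			// fall back to the plain string parameters, as EnumType does above
			mappedClass = Object.class;
			qualifiedColumn = parameters.getProperty( PROPERTY );
		}
	}
}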
@ -1,186 +0,0 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
package org.jboss.jandex;
|
||||
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Aggregates information from multiple {@link Index} instances.
|
||||
*
|
||||
* @author John Bailey
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public class CompositeIndex implements IndexResult {
|
||||
private final List<Index> indexes;
|
||||
|
||||
public CompositeIndex(Index... indexes) {
|
||||
this( Arrays.asList( indexes ) );
|
||||
}
|
||||
|
||||
public CompositeIndex(List<Index> indexes) {
|
||||
this.indexes = indexes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<AnnotationInstance> getAnnotations(DotName annotationName) {
|
||||
final Set<AnnotationInstance> allInstances = new HashSet<AnnotationInstance>();
|
||||
for (Index index : indexes) {
|
||||
copy( index.getAnnotations( annotationName ), allInstances );
|
||||
}
|
||||
return Collections.unmodifiableSet( allInstances );
|
||||
}
|
||||
|
||||
private <T> void copy(Collection<T> source, Collection<T> target) {
|
||||
if ( source != null ) {
|
||||
target.addAll( source );
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<ClassInfo> getKnownClasses() {
|
||||
final List<ClassInfo> allKnown = new ArrayList<ClassInfo>();
|
||||
for ( Index index : indexes ) {
|
||||
copy( index.getKnownClasses(), allKnown );
|
||||
}
|
||||
return Collections.unmodifiableCollection( allKnown );
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClassInfo getClassByName(DotName className) {
|
||||
for ( Index index : indexes ) {
|
||||
final ClassInfo info = index.getClassByName( className );
|
||||
if ( info != null ) {
|
||||
return info;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<ClassInfo> getKnownDirectSubclasses(DotName className) {
|
||||
final Set<ClassInfo> allKnown = new HashSet<ClassInfo>();
|
||||
for ( Index index : indexes ) {
|
||||
copy( index.getKnownDirectSubclasses( className ), allKnown );
|
||||
}
|
||||
return Collections.unmodifiableSet( allKnown );
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<ClassInfo> getAllKnownSubclasses(final DotName className) {
|
||||
final Set<ClassInfo> allKnown = new HashSet<ClassInfo>();
|
||||
final Set<DotName> processedClasses = new HashSet<DotName>();
|
||||
getAllKnownSubClasses(className, allKnown, processedClasses);
|
||||
return allKnown;
|
||||
}
|
||||
|
||||
private void getAllKnownSubClasses(DotName className, Set<ClassInfo> allKnown, Set<DotName> processedClasses) {
|
||||
final Set<DotName> subClassesToProcess = new HashSet<DotName>();
|
||||
subClassesToProcess.add(className);
|
||||
while (!subClassesToProcess.isEmpty()) {
|
||||
final Iterator<DotName> toProcess = subClassesToProcess.iterator();
|
||||
DotName name = toProcess.next();
|
||||
toProcess.remove();
|
||||
processedClasses.add(name);
|
||||
getAllKnownSubClasses(name, allKnown, subClassesToProcess, processedClasses);
|
||||
}
|
||||
}
|
||||
|
||||
private void getAllKnownSubClasses(
|
||||
DotName name,
|
||||
Set<ClassInfo> allKnown,
|
||||
Set<DotName> subClassesToProcess,
|
||||
Set<DotName> processedClasses) {
|
||||
for ( Index index : indexes ) {
|
||||
final Collection<ClassInfo> list = index.getKnownDirectSubclasses( name );
|
||||
if ( list != null ) {
|
||||
for ( final ClassInfo clazz : list ) {
|
||||
final DotName className = clazz.name();
|
||||
if ( !processedClasses.contains( className ) ) {
|
||||
allKnown.add( clazz );
|
||||
subClassesToProcess.add( className );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<ClassInfo> getKnownDirectImplementors(DotName className) {
|
||||
final Set<ClassInfo> allKnown = new HashSet<ClassInfo>();
|
||||
for ( Index index : indexes ) {
|
||||
copy( index.getKnownDirectImplementors( className ), allKnown );
|
||||
}
|
||||
return Collections.unmodifiableSet(allKnown); }
|
||||
|
||||
@Override
|
||||
public Collection<ClassInfo> getAllKnownImplementors(DotName interfaceName) {
|
||||
final Set<ClassInfo> allKnown = new HashSet<ClassInfo>();
|
||||
final Set<DotName> subInterfacesToProcess = new HashSet<DotName>();
|
||||
final Set<DotName> processedClasses = new HashSet<DotName>();
|
||||
subInterfacesToProcess.add( interfaceName );
|
||||
while ( !subInterfacesToProcess.isEmpty() ) {
|
||||
final Iterator<DotName> toProcess = subInterfacesToProcess.iterator();
|
||||
DotName name = toProcess.next();
|
||||
toProcess.remove();
|
||||
processedClasses.add( name );
|
||||
getKnownImplementors( name, allKnown, subInterfacesToProcess, processedClasses );
|
||||
}
|
||||
return allKnown;
|
||||
}
|
||||
|
||||
private void getKnownImplementors(
|
||||
DotName name,
|
||||
Set<ClassInfo> allKnown,
|
||||
Set<DotName> subInterfacesToProcess,
|
||||
Set<DotName> processedClasses) {
|
||||
for (Index index : indexes) {
|
||||
final List<ClassInfo> list = index.getKnownDirectImplementors(name);
|
||||
if (list != null) {
|
||||
for (final ClassInfo clazz : list) {
|
||||
final DotName className = clazz.name();
|
||||
if (!processedClasses.contains(className)) {
|
||||
if ( Modifier.isInterface( clazz.flags() )) {
|
||||
subInterfacesToProcess.add(className);
|
||||
}
|
||||
else {
|
||||
if (!allKnown.contains(clazz)) {
|
||||
allKnown.add(clazz);
|
||||
processedClasses.add(className);
|
||||
getAllKnownSubClasses(className, allKnown, processedClasses);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
hibernate-core/src/test/java/org/hibernate/cache/spi/NaturalIdCacheKeyTest.java (new file, 96 lines)
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
package org.hibernate.cache.spi;
|
||||
|
||||
import static junit.framework.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
import static org.mockito.Matchers.anyObject;
|
||||
import static org.mockito.Matchers.eq;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.persister.entity.EntityPersister;
|
||||
import org.hibernate.type.Type;
|
||||
import org.junit.Test;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.stubbing.Answer;
|
||||
|
||||
public class NaturalIdCacheKeyTest {
|
||||
@Test
|
||||
public void testSerializationRoundTrip() throws Exception {
|
||||
final EntityPersister entityPersister = mock(EntityPersister.class);
|
||||
final SessionImplementor sessionImplementor = mock(SessionImplementor.class);
|
||||
final SessionFactoryImplementor sessionFactoryImplementor = mock(SessionFactoryImplementor.class);
|
||||
final Type mockType = mock(Type.class);
|
||||
|
||||
when (entityPersister.getRootEntityName()).thenReturn("EntityName");
|
||||
|
||||
when(sessionImplementor.getFactory()).thenReturn(sessionFactoryImplementor);
|
||||
|
||||
when(entityPersister.getNaturalIdentifierProperties()).thenReturn(new int[] {0, 1, 2});
|
||||
when(entityPersister.getPropertyTypes()).thenReturn(new Type[] {
|
||||
mockType,
|
||||
mockType,
|
||||
mockType
|
||||
});
|
||||
|
||||
when(mockType.getHashCode(anyObject(), eq(sessionFactoryImplementor))).thenAnswer(new Answer<Object>() {
|
||||
@Override
|
||||
public Object answer(InvocationOnMock invocation) throws Throwable {
|
||||
return invocation.getArguments()[0].hashCode();
|
||||
}
|
||||
});
|
||||
|
||||
when(mockType.disassemble(anyObject(), eq(sessionImplementor), eq(null))).thenAnswer(new Answer<Object>() {
|
||||
@Override
|
||||
public Object answer(InvocationOnMock invocation) throws Throwable {
|
||||
return invocation.getArguments()[0];
|
||||
}
|
||||
});
|
||||
|
||||
final NaturalIdCacheKey key = new NaturalIdCacheKey(new Object[] {"a", "b", "c"}, entityPersister, sessionImplementor);
|
||||
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ObjectOutputStream oos = new ObjectOutputStream(baos);
|
||||
oos.writeObject(key);
|
||||
|
||||
final ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()));
|
||||
final NaturalIdCacheKey keyClone = (NaturalIdCacheKey)ois.readObject();
|
||||
|
||||
assertEquals(key, keyClone);
|
||||
assertEquals(key.hashCode(), keyClone.hashCode());
|
||||
assertEquals(key.toString(), keyClone.toString());
|
||||
assertEquals(key.getEntityName(), keyClone.getEntityName());
|
||||
assertArrayEquals(key.getNaturalIdValues(), keyClone.getNaturalIdValues());
|
||||
assertEquals(key.getTenantId(), keyClone.getTenantId());
|
||||
|
||||
}
|
||||
}
|
|
@@ -29,6 +29,7 @@ import java.util.List;
import org.hibernate.FetchMode;
import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.CascadeStyles;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.internal.MetadataImpl;
import org.hibernate.metamodel.spi.MetadataImplementor;
@@ -170,7 +171,7 @@ public abstract class AbstractBasicBindingTests extends BaseUnitTestCase {
		ManyToOneAttributeBinding manyToOneAttributeBinding = (ManyToOneAttributeBinding) attributeBinding;
		assertEquals( referencedEntityName, manyToOneAttributeBinding.getReferencedEntityName() );
		assertSame( referencedEntityBinding, manyToOneAttributeBinding.getReferencedEntityBinding() );
		assertSame( CascadeStyle.NONE, manyToOneAttributeBinding.getCascadeStyle() );
		assertSame( CascadeStyles.NONE, manyToOneAttributeBinding.getCascadeStyle() );
		assertTrue( manyToOneAttributeBinding.isLazy() );
		assertSame( FetchMode.SELECT, manyToOneAttributeBinding.getFetchMode() );
		assertSame( FetchTiming.DELAYED, manyToOneAttributeBinding.getFetchTiming() );
@@ -34,6 +34,8 @@ import org.hibernate.IrrelevantEntity;
import org.hibernate.Session;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.transaction.spi.TransactionContext;
import org.hibernate.persister.entity.EntityPersister;

import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
@@ -241,12 +243,20 @@ public class SessionWithSharedConnectionTest extends BaseCoreFunctionalTestCase

		EventListenerRegistry eventListenerRegistry = sessionFactory().getServiceRegistry().getService(EventListenerRegistry.class);
		//register a post commit listener
		eventListenerRegistry.appendListeners( EventType.POST_COMMIT_INSERT, new PostInsertEventListener(){
			@Override
			public void onPostInsert(PostInsertEvent event) {
				((IrrelevantEntity) event.getEntity()).setName( postCommitMessage );
			}
		});
		eventListenerRegistry.appendListeners(
				EventType.POST_COMMIT_INSERT,
				new PostInsertEventListener() {
					@Override
					public void onPostInsert(PostInsertEvent event) {
						((IrrelevantEntity) event.getEntity()).setName( postCommitMessage );
					}

					@Override
					public boolean requiresPostCommitHanding(EntityPersister persister) {
						return true;
					}
				}
		);

		session.getTransaction().begin();
@@ -0,0 +1,57 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors. All third-party contributions are
 * distributed under license by Red Hat, Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA 02110-1301 USA
 */
package org.hibernate.test.annotations.entity;

import static org.junit.Assert.fail;

import java.util.ConcurrentModificationException;

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.Test;

/**
 * @author Guenther Demetz
 */
public class HibernateAnnotationMappingTest extends BaseUnitTestCase {

	@Test
	@TestForIssue( jiraKey = "HHH-7446" )
	public void testUniqueConstraintAnnotationOnNaturalIds() throws Exception {
		Configuration configuration = new Configuration();
		configuration.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
		configuration.addAnnotatedClass(Month.class);
		SessionFactory sf = null;
		try {
			sf = configuration.buildSessionFactory();
			sf.close();
		}
		catch (ConcurrentModificationException e) {
			fail(e.toString());
		}
	}
}
@@ -0,0 +1,63 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors. All third-party contributions are
 * distributed under license by Red Hat, Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA 02110-1301 USA
 */

package org.hibernate.test.annotations.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;

import org.hibernate.annotations.NaturalId;

/**
 * @author Guenther Demetz
 */
@Entity
@Table(uniqueConstraints = {@UniqueConstraint(columnNames={"year", "month"})})
// Remark: Without line above, hibernate creates the combined unique index as follows: unique (month, year)
// It seems that hibernate orders the attributes in alphabetic order
// We indeed want to have the inverted sequence: year, month
// In this way queries with only year in the where-clause can take advantage of this index
// N.B.: Usually a user defines a combined index beginning with the most discriminating property
public class Month {

	@Id @GeneratedValue
	private int id;

	public int getId() {
		return id;
	}

	public void setId(int id) {
		this.id = id;
	}

	@NaturalId
	private int year;

	@NaturalId
	private int month;

}
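Editor's note: the remark inside Month argues for the (year, month) column order so that a query restricting only on year can still use the combined unique index. The following standalone sketch is not part of the commit; it only illustrates such a year-only query. The bootstrap code, the class name YearOnlyQuerySketch, and the literal year value are illustrative assumptions, and a real run would additionally need a configured JDBC connection and dialect.

import java.util.List;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.test.annotations.entity.Month;

// Hypothetical helper, not part of the commit: runs a query that restricts only on "year",
// the leading column of the unique constraint declared on Month.
public class YearOnlyQuerySketch {
	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
		configuration.addAnnotatedClass( Month.class );
		SessionFactory sessionFactory = configuration.buildSessionFactory();

		Session session = sessionFactory.openSession();
		session.getTransaction().begin();
		// Only "year" appears in the where-clause; with the (year, month) ordering the
		// unique index can serve this query, with (month, year) it generally cannot.
		List<?> months = session.createQuery( "from Month m where m.year = :year" )
				.setParameter( "year", 2012 )
				.list();
		System.out.println( months.size() );
		session.getTransaction().commit();
		session.close();
		sessionFactory.close();
	}
}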
@@ -0,0 +1,93 @@
package org.hibernate.test.annotations.enumerated;

import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.TypeDef;
import org.hibernate.annotations.TypeDefs;

/**
 * @author Janario Oliveira
 */
@Entity
@TypeDefs({ @TypeDef(typeClass = LastNumberType.class, defaultForType = EntityEnum.LastNumber.class) })
public class EntityEnum {

	enum Common {

		A1, A2, B1, B2
	}

	enum FirstLetter {

		A_LETTER, B_LETTER, C_LETTER
	}

	enum LastNumber {

		NUMBER_1, NUMBER_2, NUMBER_3
	}

	@Id
	@GeneratedValue
	private long id;
	private Common ordinal;
	@Enumerated(EnumType.STRING)
	private Common string;
	@Type(type = "org.hibernate.test.annotations.enumerated.FirstLetterType")
	private FirstLetter firstLetter;
	private LastNumber lastNumber;
	@Enumerated(EnumType.STRING)
	private LastNumber explicitOverridingImplicit;

	public long getId() {
		return id;
	}

	public void setId(long id) {
		this.id = id;
	}

	public Common getOrdinal() {
		return ordinal;
	}

	public void setOrdinal(Common ordinal) {
		this.ordinal = ordinal;
	}

	public Common getString() {
		return string;
	}

	public void setString(Common string) {
		this.string = string;
	}

	public FirstLetter getFirstLetter() {
		return firstLetter;
	}

	public void setFirstLetter(FirstLetter firstLetter) {
		this.firstLetter = firstLetter;
	}

	public LastNumber getLastNumber() {
		return lastNumber;
	}

	public void setLastNumber(LastNumber lastNumber) {
		this.lastNumber = lastNumber;
	}

	public LastNumber getExplicitOverridingImplicit() {
		return explicitOverridingImplicit;
	}

	public void setExplicitOverridingImplicit(LastNumber explicitOverridingImplicit) {
		this.explicitOverridingImplicit = explicitOverridingImplicit;
	}
}
@@ -0,0 +1,337 @@
package org.hibernate.test.annotations.enumerated;

import java.io.Serializable;
import org.hibernate.Session;
import org.hibernate.cfg.Configuration;
import org.hibernate.criterion.Restrictions;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.hibernate.type.EnumType;
import org.hibernate.type.Type;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.hibernate.test.annotations.enumerated.EntityEnum.*;

/**
 * Test type definition for enum
 *
 * @author Janario Oliveira
 */
@FailureExpectedWithNewMetamodel
public class EnumeratedTypeTest extends BaseCoreFunctionalTestCase {

	@Test
	public void testTypeDefinition() {
		Configuration cfg = configuration();
		PersistentClass pc = cfg.getClassMapping( EntityEnum.class.getName() );

		// ordinal default of EnumType
		Type ordinalEnum = pc.getProperty( "ordinal" ).getType();
		assertEquals( Common.class, ordinalEnum.getReturnedClass() );
		assertEquals( EnumType.class.getName(), ordinalEnum.getName() );

		// string defined by Enumerated(STRING)
		Type stringEnum = pc.getProperty( "string" ).getType();
		assertEquals( Common.class, stringEnum.getReturnedClass() );
		assertEquals( EnumType.class.getName(), stringEnum.getName() );

		// explicit defined by @Type
		Type first = pc.getProperty( "firstLetter" ).getType();
		assertEquals( FirstLetter.class, first.getReturnedClass() );
		assertEquals( FirstLetterType.class.getName(), first.getName() );

		// implicit defined by @TypeDef in somewhere
		Type last = pc.getProperty( "lastNumber" ).getType();
		assertEquals( LastNumber.class, last.getReturnedClass() );
		assertEquals( LastNumberType.class.getName(), last.getName() );

		// implicit defined by @TypeDef in anywhere, but overrided by Enumerated(STRING)
		Type implicitOverrideExplicit = pc.getProperty( "explicitOverridingImplicit" ).getType();
		assertEquals( LastNumber.class, implicitOverrideExplicit.getReturnedClass() );
		assertEquals( EnumType.class.getName(), implicitOverrideExplicit.getName() );
	}

	@Test
	public void testTypeQuery() {
		Session session = openSession();
		session.getTransaction().begin();

		// persist
		EntityEnum entityEnum = new EntityEnum();
		entityEnum.setOrdinal( Common.A2 );
		Serializable id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.ordinal=1" ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( Common.A2, entityEnum.getOrdinal() );
		// find parameter
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.ordinal=:ordinal" )
				.setParameter( "ordinal", Common.A2 ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( Common.A2, entityEnum.getOrdinal() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where ordinal=1" ).executeUpdate() );

		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setString( Common.B1 );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.string='B1'" ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( Common.B1, entityEnum.getString() );
		// find parameter
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.string=:string" )
				.setParameter( "string", Common.B1 ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( Common.B1, entityEnum.getString() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where string='B1'" ).executeUpdate() );
		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setFirstLetter( FirstLetter.C_LETTER );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.firstLetter='C'" ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( FirstLetter.C_LETTER, entityEnum.getFirstLetter() );
		// find parameter
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.firstLetter=:firstLetter" )
				.setParameter( "firstLetter", FirstLetter.C_LETTER ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( FirstLetter.C_LETTER, entityEnum.getFirstLetter() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where firstLetter='C'" ).executeUpdate() );

		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setLastNumber( LastNumber.NUMBER_1 );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.lastNumber='1'" ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( LastNumber.NUMBER_1, entityEnum.getLastNumber() );
		// find parameter
		entityEnum = (EntityEnum) session.createQuery( "from EntityEnum ee where ee.lastNumber=:lastNumber" )
				.setParameter( "lastNumber", LastNumber.NUMBER_1 ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( LastNumber.NUMBER_1, entityEnum.getLastNumber() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where lastNumber='1'" ).executeUpdate() );

		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setExplicitOverridingImplicit( LastNumber.NUMBER_2 );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createQuery(
				"from EntityEnum ee where ee.explicitOverridingImplicit='NUMBER_2'" ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( LastNumber.NUMBER_2, entityEnum.getExplicitOverridingImplicit() );
		// find parameter
		entityEnum = (EntityEnum) session
				.createQuery( "from EntityEnum ee where ee.explicitOverridingImplicit=:override" )
				.setParameter( "override", LastNumber.NUMBER_2 ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( LastNumber.NUMBER_2, entityEnum.getExplicitOverridingImplicit() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where explicitOverridingImplicit='NUMBER_2'" )
				.executeUpdate() );

		session.getTransaction().commit();
		session.close();
	}

	@Test
	public void testTypeCriteria() {
		Session session = openSession();
		session.getTransaction().begin();

		// persist
		EntityEnum entityEnum = new EntityEnum();
		entityEnum.setOrdinal( Common.A1 );
		Serializable id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createCriteria( EntityEnum.class )
				.add( Restrictions.eq( "ordinal", Common.A1 ) ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( Common.A1, entityEnum.getOrdinal() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where ordinal=0" ).executeUpdate() );

		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setString( Common.B2 );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createCriteria( EntityEnum.class )
				.add( Restrictions.eq( "string", Common.B2 ) ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( Common.B2, entityEnum.getString() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where string='B2'" ).executeUpdate() );

		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setFirstLetter( FirstLetter.A_LETTER );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createCriteria( EntityEnum.class )
				.add( Restrictions.eq( "firstLetter", FirstLetter.A_LETTER ) ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( FirstLetter.A_LETTER, entityEnum.getFirstLetter() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where firstLetter='A'" ).executeUpdate() );

		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setLastNumber( LastNumber.NUMBER_3 );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createCriteria( EntityEnum.class )
				.add( Restrictions.eq( "lastNumber", LastNumber.NUMBER_3 ) ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( LastNumber.NUMBER_3, entityEnum.getLastNumber() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where lastNumber='3'" ).executeUpdate() );

		session.getTransaction().commit();
		session.close();

		// **************
		session = openSession();
		session.getTransaction().begin();

		// persist
		entityEnum = new EntityEnum();
		entityEnum.setExplicitOverridingImplicit( LastNumber.NUMBER_2 );
		id = session.save( entityEnum );

		session.getTransaction().commit();
		session.close();
		session = openSession();
		session.getTransaction().begin();

		// find
		entityEnum = (EntityEnum) session.createCriteria( EntityEnum.class )
				.add( Restrictions.eq( "explicitOverridingImplicit", LastNumber.NUMBER_2 ) ).uniqueResult();
		assertEquals( id, entityEnum.getId() );
		assertEquals( LastNumber.NUMBER_2, entityEnum.getExplicitOverridingImplicit() );
		// delete
		assertEquals( 1, session.createSQLQuery( "DELETE FROM EntityEnum where explicitOverridingImplicit='NUMBER_2'" )
				.executeUpdate() );

		session.getTransaction().commit();
		session.close();

	}

	@Override
	protected Class[] getAnnotatedClasses() {
		return new Class[] { EntityEnum.class };
	}
}
@@ -0,0 +1,41 @@
package org.hibernate.test.annotations.enumerated;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionImplementor;

/**
 * @author Janario Oliveira
 */
public class FirstLetterType extends org.hibernate.type.EnumType {

	@Override
	public int[] sqlTypes() {
		return new int[] { Types.VARCHAR };
	}

	@Override
	public Object nullSafeGet(ResultSet rs, String[] names, SessionImplementor session, Object owner)
			throws HibernateException, SQLException {
		String persistValue = (String) rs.getObject( names[0] );
		if ( rs.wasNull() ) {
			return null;
		}
		return Enum.valueOf( returnedClass(), persistValue + "_LETTER" );
	}

	@Override
	public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session)
			throws HibernateException, SQLException {
		if ( value == null ) {
			st.setNull( index, sqlTypes()[0] );
		}
		else {
			String enumString = ( (Enum<?>) value ).name();
			st.setObject( index, enumString.charAt( 0 ), sqlTypes()[0] );
		}
	}
}
@@ -0,0 +1,42 @@
package org.hibernate.test.annotations.enumerated;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionImplementor;

/**
 * @author Janario Oliveira
 */
public class LastNumberType extends org.hibernate.type.EnumType {

	@Override
	public int[] sqlTypes() {
		return new int[] { Types.VARCHAR };
	}

	@Override
	public Object nullSafeGet(ResultSet rs, String[] names, SessionImplementor session, Object owner)
			throws HibernateException, SQLException {
		String persistValue = (String) rs.getObject( names[0] );
		if ( rs.wasNull() ) {
			return null;
		}
		return Enum.valueOf( returnedClass(), "NUMBER_" + persistValue );
	}

	@Override
	public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session)
			throws HibernateException, SQLException {
		if ( value == null ) {
			st.setNull( index, sqlTypes()[0] );
		}
		else {

			String enumString = ( (Enum<?>) value ).name();
			st.setObject( index, enumString.charAt( enumString.length() - 1 ), sqlTypes()[0] );
		}
	}
}
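Editor's note: the two custom EnumType subclasses above differ only in how they translate between the stored string and the enum constant name. The standalone sketch below is not part of the commit; it mirrors that name-mapping logic in plain Java, with no JDBC involved, so the round trip is easy to see. The class name EnumNameMappingSketch is hypothetical, and the enum declarations are copied from EntityEnum purely for illustration.

// Hypothetical illustration, not part of the commit: the string <-> enum name mapping
// performed by FirstLetterType and LastNumberType, without any JDBC involved.
public class EnumNameMappingSketch {

	enum FirstLetter { A_LETTER, B_LETTER, C_LETTER }

	enum LastNumber { NUMBER_1, NUMBER_2, NUMBER_3 }

	public static void main(String[] args) {
		// FirstLetterType stores only the first letter and reads it back by appending "_LETTER".
		char stored = FirstLetter.C_LETTER.name().charAt( 0 );                        // 'C'
		FirstLetter readBack = Enum.valueOf( FirstLetter.class, stored + "_LETTER" );  // C_LETTER

		// LastNumberType stores only the last character and reads it back by prepending "NUMBER_".
		String name = LastNumber.NUMBER_2.name();
		char storedDigit = name.charAt( name.length() - 1 );                           // '2'
		LastNumber readBack2 = Enum.valueOf( LastNumber.class, "NUMBER_" + storedDigit ); // NUMBER_2

		System.out.println( stored + " -> " + readBack + ", " + storedDigit + " -> " + readBack2 );
	}
}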
Some files were not shown because too many files have changed in this diff.