Merge branch 'wip/6.0_merge_6' into wip/6.0
commit 2c6e000537
@@ -0,0 +1,72 @@
+@Library('hibernate-jenkins-pipeline-helpers@1.5') _
+
+pipeline {
+	agent {
+		label 'LongDuration'
+	}
+	tools {
+		jdk 'OpenJDK 8 Latest'
+	}
+	stages {
+		stage('Build') {
+			steps {
+				script {
+					docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
+						docker.image('openjdk:8-jdk').pull()
+					}
+				}
+				dir('hibernate') {
+					checkout scm
+					sh """ \
+						./gradlew publishToMavenLocal
+					"""
+					script {
+						env.HIBERNATE_VERSION = sh (
+								script: "grep hibernateVersion gradle/version.properties|cut -d'=' -f2",
+								returnStdout: true
+						).trim()
+					}
+				}
+				dir('tck') {
+					checkout changelog: false, poll: false, scm: [$class: 'GitSCM', branches: [[name: '*/main']], extensions: [], userRemoteConfigs: [[url: 'https://github.com/hibernate/jakarta-tck-runner.git']]]
+					sh """ \
+						cd jpa-2.2; docker build -t jakarta-tck-runner .
+					"""
+				}
+			}
+		}
+		stage('Run TCK') {
+			steps {
+				sh """ \
+					docker rm -f tck || true
+					docker run -v ~/.m2/repository/org/hibernate:/root/.m2/repository/org/hibernate:z -e NO_SLEEP=true -e HIBERNATE_VERSION=$HIBERNATE_VERSION --name tck jakarta-tck-runner
+					docker cp tck:/tck/persistence-tck/tmp/JTreport/ ./JTreport
+				"""
+				archiveArtifacts artifacts: 'JTreport/**'
+				script {
+					failures = sh (
+							script: """ \
+								while read line; do
+									if [[ "\$line" != *"Passed." ]]; then
+										echo "\$line"
+									fi
+								done <JTreport/text/summary.txt
+							""",
+							returnStdout: true
+					).trim()
+					if ( !failures.isEmpty() ) {
+						echo "Some TCK tests failed:"
+						echo failures
+						currentBuild.result = 'FAILURE'
+					}
+				}
+			}
+		}
+	}
+	post {
+		always {
+			// Space-separated
+			notifyBuildResult maintainers: 'christian.beikov@gmail.com'
+		}
+	}
+}
@@ -0,0 +1,72 @@
+@Library('hibernate-jenkins-pipeline-helpers@1.5') _
+
+pipeline {
+	agent {
+		label 'LongDuration'
+	}
+	tools {
+		jdk 'OpenJDK 8 Latest'
+	}
+	stages {
+		stage('Build') {
+			steps {
+				script {
+					docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
+						docker.image('openjdk:8-jdk').pull()
+					}
+				}
+				dir('hibernate') {
+					checkout scm
+					sh """ \
+						./gradlew publishToMavenLocal
+					"""
+					script {
+						env.HIBERNATE_VERSION = sh (
+								script: "grep hibernateVersion gradle/version.properties|cut -d'=' -f2",
+								returnStdout: true
+						).trim()
+					}
+				}
+				dir('tck') {
+					checkout changelog: false, poll: false, scm: [$class: 'GitSCM', branches: [[name: '*/main']], extensions: [], userRemoteConfigs: [[url: 'https://github.com/hibernate/jakarta-tck-runner.git']]]
+					sh """ \
+						cd jpa-3.0; docker build -t jakarta-tck-runner .
+					"""
+				}
+			}
+		}
+		stage('Run TCK') {
+			steps {
+				sh """ \
+					docker rm -f tck || true
+					docker run -v ~/.m2/repository/org/hibernate:/root/.m2/repository/org/hibernate:z -e NO_SLEEP=true -e HIBERNATE_VERSION=$HIBERNATE_VERSION --name tck jakarta-tck-runner
+					docker cp tck:/tck/persistence-tck/tmp/JTreport/ ./JTreport
+				"""
+				archiveArtifacts artifacts: 'JTreport/**'
+				script {
+					failures = sh (
+							script: """ \
+								while read line; do
+									if [[ "\$line" != *"Passed." ]]; then
+										echo "\$line"
+									fi
+								done <JTreport/text/summary.txt
+							""",
+							returnStdout: true
+					).trim()
+					if ( !failures.isEmpty() ) {
+						echo "Some TCK tests failed:"
+						echo failures
+						currentBuild.result = 'FAILURE'
+					}
+				}
+			}
+		}
+	}
+	post {
+		always {
+			// Space-separated
+			notifyBuildResult maintainers: 'christian.beikov@gmail.com'
+		}
+	}
+}
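Both pipeline files read the Hibernate version with a grep/cut over gradle/version.properties. Purely as an illustration (not part of this commit), the same lookup expressed in Java with java.util.Properties might look like the sketch below; the file path and the hibernateVersion key come from the pipeline above, everything else is assumed.

    import java.io.FileReader;
    import java.util.Properties;

    public class ReadHibernateVersion {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            // Same file the Jenkins pipelines grep for the hibernateVersion key
            try ( FileReader reader = new FileReader( "gradle/version.properties" ) ) {
                props.load( reader );
            }
            // Equivalent of: grep hibernateVersion gradle/version.properties | cut -d'=' -f2
            System.out.println( props.getProperty( "hibernateVersion" ).trim() );
        }
    }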
Binary image file changed: 87 KiB before, 79 KiB after (diff suppressed because it is too large).
@@ -45,12 +45,6 @@ public class DiscriminatorNotNullSingleTableTest extends BaseEntityManagerFuncti
 	@Test
 	public void test() {
 		doInJPA( this::entityManagerFactory, entityManager -> {
-			entityManager.unwrap( Session.class ).doWork( connection -> {
-				try(Statement statement = connection.createStatement()) {
-					statement.executeUpdate( "ALTER TABLE Account ALTER COLUMN DTYPE SET NULL" );
-				}
-			} );
-
 			//tag::entity-inheritance-single-table-discriminator-value-persist-example[]
 			DebitAccount debitAccount = new DebitAccount();
 			debitAccount.setId( 1L );
@@ -1,7 +1,6 @@
 # Keep all these properties in sync unless you know what you are doing!
 org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=256m -XX:+HeapDumpOnOutOfMemoryError -Duser.language=en -Duser.country=US -Duser.timezone=UTC -Dfile.encoding=UTF-8
-# Needs add-opens because of https://github.com/gradle/gradle/issues/15538
-toolchain.compiler.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=256m -XX:+HeapDumpOnOutOfMemoryError -Duser.language=en -Duser.country=US -Duser.timezone=UTC -Dfile.encoding=UTF-8 --add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED
+toolchain.compiler.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=256m -XX:+HeapDumpOnOutOfMemoryError -Duser.language=en -Duser.country=US -Duser.timezone=UTC -Dfile.encoding=UTF-8
 toolchain.javadoc.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=256m -XX:+HeapDumpOnOutOfMemoryError -Duser.language=en -Duser.country=US -Duser.timezone=UTC -Dfile.encoding=UTF-8
 toolchain.launcher.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=256m -XX:+HeapDumpOnOutOfMemoryError -Duser.language=en -Duser.country=US -Duser.timezone=UTC -Dfile.encoding=UTF-8
 
@@ -19,6 +19,7 @@ ext {
 	}
 
 	jpaVersion = new JpaVersion('2.2')
+	jakartaJpaVersion = new JpaVersion('3.0.0')
 }
 
 group = 'org.hibernate.orm'
@@ -158,6 +158,8 @@ else {
 			options.compilerArgs << gradle.ext.javaVersions.main.release.toString()
 		} else {
 			options.release = gradle.ext.javaVersions.main.release.asInt()
+			// Needs add-opens because of https://github.com/gradle/gradle/issues/15538
+			options.forkOptions.jvmArgs.addAll( ["--add-opens", "jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED"] )
 		}
 	}
 	tasks.compileTestJava.configure {
@@ -168,6 +170,8 @@ else {
 			options.compilerArgs << gradle.ext.javaVersions.test.release.toString()
 		} else {
 			options.release = gradle.ext.javaVersions.test.release.asInt()
+			// Needs add-opens because of https://github.com/gradle/gradle/issues/15538
+			options.forkOptions.jvmArgs.addAll( ["--add-opens", "jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED"] )
 		}
 	}
 
@@ -21,12 +21,14 @@ ext {
 	hibernateValidatorVersion = '6.1.6.Final'
 	validationApiVersion = '2.0.1.Final'
 	elVersion = '3.0.1-b09'
+	hibernateValidatorJakartaVersion = '7.0.1.Final'
 
 	cdiVersion = '2.0'
 	weldVersion = '3.1.5.Final'
+	jakartaWeldVersion = '4.0.1.SP1'
 
 	javassistVersion = '3.27.0-GA'
-	byteBuddyVersion = '1.10.21'
+	byteBuddyVersion = '1.10.22'
 
 	agroalVersion = '1.9'
 
@@ -43,6 +45,7 @@ ext {
 	// We can't upgrade JAXB in Karaf (yet), but fortunately everything works fine with the version built in Karaf
 	jaxbApiVersionOsgiRange = "[2.2,3)"
 	jaxbRuntimeVersion = '2.3.1'
+	jakartaJaxbRuntimeVersion = '3.0.0'
 
 	//GraalVM
 	graalvmVersion = '19.3.1'
@@ -82,6 +85,18 @@ ext {
 			// required by JAXB from JDK 9 as it is not available anymore in JDK 9
 			activation: 'javax.activation:javax.activation-api:1.2.0',
 
+			// jakarta
+			jakarta_jpa: "jakarta.persistence:jakarta.persistence-api:${project.jakartaJpaVersion}",
+			jakarta_jta: 'jakarta.transaction:jakarta.transaction-api:2.0.0',
+			jakarta_validation: 'jakarta.validation:jakarta.validation-api:3.0.0',
+			jakarta_jacc: 'jakarta.authorization:jakarta.authorization-api:2.0.0',
+			jakarta_interceptor: 'jakarta.interceptor:jakarta.interceptor-api:2.0.0',
+			jakarta_activation: 'jakarta.activation:jakarta.activation-api:2.0.1',
+			jakarta_resource: 'jakarta.resource:jakarta.resource-api:2.0.0',
+			jakarta_jaxb_api: 'jakarta.xml.bind:jakarta.xml.bind-api:3.0.0',
+			jakarta_jaxb_runtime: "org.glassfish.jaxb:jaxb-runtime:${jakartaJaxbRuntimeVersion}",
+			jakarta_cdi: 'jakarta.enterprise:jakarta.enterprise.cdi-api:3.0.0',
+
 			// logging
 			logging: 'org.jboss.logging:jboss-logging:3.4.1.Final',
 			logging_annotations: 'org.jboss.logging:jboss-logging-annotations:2.1.0.Final',
@@ -140,7 +155,10 @@ ext {
 
 			informix: 'com.ibm.informix:jdbc:4.10.12',
 			firebird: 'org.firebirdsql.jdbc:jaybird:4.0.3.java8',
-			jboss_jta: "org.jboss.jbossts:jbossjta:4.16.4.Final",
+			jboss_jta: "org.jboss.narayana.jta:narayana-jta:5.11.2.Final",
+			jboss_tx_spi: "org.jboss:jboss-transaction-spi:7.6.1.Final",
+			jboss_jta_jakarta: "org.jboss.narayana.jta:narayana-jta-jakarta:5.11.2.Final",
+			jboss_tx_spi_jakarta: "org.jboss:jboss-transaction-spi-jakarta:7.6.1.Final",
 			xapool: "com.experlog:xapool:1.5.0",
 			mockito: 'org.mockito:mockito-core:2.19.1',
 			mockito_inline: 'org.mockito:mockito-inline:2.19.1',
@@ -149,6 +167,10 @@ ext {
 			// EL required by Hibernate Validator at test runtime
 			expression_language: "org.glassfish:javax.el:${elVersion}",
 
+			jakarta_validator:"org.hibernate.validator:hibernate-validator:${hibernateValidatorJakartaVersion}",
+			// EL required by Hibernate Validator at test runtime
+			jakarta_el: 'org.glassfish:jakarta.el:4.0.1',
+
 			c3p0: "com.mchange:c3p0:0.9.5.5",
 			ehcache: "net.sf.ehcache:ehcache:2.10.6",
 			ehcache3: "org.ehcache:ehcache:3.6.1",
@@ -165,6 +187,7 @@ ext {
 
 			cdi: "javax.enterprise:cdi-api:${cdiVersion}",
 			weld: "org.jboss.weld.se:weld-se-shaded:${weldVersion}",
+			jakarta_weld: "org.jboss.weld.se:weld-se-shaded:${jakartaWeldVersion}",
 
 			assertj: "org.assertj:assertj-core:${assertjVersion}",
 
@@ -177,6 +200,8 @@ ext {
 
 			jboss_vfs: "org.jboss:jboss-vfs:3.2.11.Final",
 			wildfly_transaction_client : 'org.wildfly.transaction:wildfly-transaction-client:1.1.7.Final',
+			// todo (jakarta): update the version when it is released
+			wildfly_transaction_client_jakarta : 'org.wildfly.transaction:wildfly-transaction-client-jakarta:1.2.0.Final-SNAPSHOT',
 
 			jboss_ejb_spec_jar : 'org.jboss.spec.javax.ejb:jboss-ejb-api_3.2_spec:1.0.0.Final',
 			jboss_annotation_spec_jar : 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.0.Final',
@@ -0,0 +1,186 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+
+import org.apache.tools.ant.filters.ReplaceTokens
+
+description = 'Hibernate O/RM implementation of the Jakarta Persistence specification'
+
+apply from: rootProject.file( 'gradle/published-java-module.gradle' )
+
+configurations {
+	tests {
+		description = 'Configuration for the produced test jar'
+	}
+	jakartaeeTransformJars
+}
+
+dependencies {
+	api libraries.jakarta_jpa
+	// This can now be made provided
+	// Could be made optional?
+	api libraries.byteBuddy
+	api libraries.antlr
+	api libraries.jakarta_jta
+	api libraries.jandex
+	api libraries.classmate
+	api libraries.jakarta_activation
+
+	compileOnly libraries.jakarta_jacc
+	compileOnly libraries.jakarta_validation
+	compileOnly libraries.ant
+	compileOnly libraries.jakarta_cdi
+
+	api libraries.dom4j
+	api libraries.commons_annotations
+
+	api libraries.jakarta_jaxb_api
+	api libraries.jakarta_jaxb_runtime
+
+	jakartaeeTransformJars 'biz.aQute.bnd:biz.aQute.bnd.transform:5.1.1',
+			'commons-cli:commons-cli:1.4',
+			'org.slf4j:slf4j-simple:1.7.30',
+			'org.slf4j:slf4j-api:1.7.26',
+			'org.eclipse.transformer:org.eclipse.transformer:0.2.0',
+			'org.eclipse.transformer:org.eclipse.transformer.cli:0.2.0'
+
+	testImplementation project(':hibernate-testing-jakarta')
+	testImplementation fileTree(dir: 'libs', include: '*.jar')
+
+	testImplementation libraries.shrinkwrap_api
+	testImplementation libraries.shrinkwrap
+	testImplementation libraries.jakarta_jacc
+	testImplementation libraries.jakarta_validation
+	testImplementation libraries.jandex
+	testImplementation libraries.classmate
+	testImplementation libraries.mockito
+	testImplementation libraries.mockito_inline
+	testImplementation libraries.jodaTime
+	testImplementation libraries.assertj
+
+	testImplementation libraries.jakarta_cdi
+
+	testImplementation( libraries.jakarta_validator ) {
+		// for test runtime
+		transitive = true
+	}
+
+	// for testing stored procedure support
+	testImplementation libraries.derby
+
+	testRuntimeOnly 'org.hamcrest:hamcrest-all:1.3'
+	testRuntimeOnly "org.jboss.spec.javax.ejb:jboss-ejb-api_3.2_spec:1.0.0.Final"
+	testRuntimeOnly libraries.jakarta_el
+	testRuntimeOnly 'jaxen:jaxen:1.1'
+	testRuntimeOnly libraries.javassist
+	testRuntimeOnly libraries.byteBuddy
+	testRuntimeOnly libraries.jakarta_weld
+	testRuntimeOnly libraries.atomikos
+	testRuntimeOnly libraries.atomikos_jta
+	// todo (jakarta): replace this when the jakarta artifact is released
+	testRuntimeOnly project(':hibernate-transaction-client')
+	// testRuntimeOnly libraries.wildfly_transaction_client_jakarta
+
+	testImplementation libraries.shrinkwrap_descriptors_api_javaee
+	testImplementation libraries.shrinkwrap_descriptors_impl_javaee
+
+	testImplementation libraries.jboss_ejb_spec_jar
+	testImplementation libraries.jboss_annotation_spec_jar
+}
+
+jar {
+	mustRunAfter project(':hibernate-core').tasks.jar
+	mustRunAfter project(':hibernate-core').tasks.testJar
+	dependsOn project(':hibernate-core').tasks.jar
+	dependsOn project(':hibernate-core').tasks.testJar
+	def baseDir = project(':hibernate-core').buildDir
+	def baseJars = fileTree(baseDir).matching {include 'libs/*.jar' }
+	inputs.files(baseJars).skipWhenEmpty()
+	outputs.dir project.buildDir
+	doLast {
+		new File(project.buildDir, "libs").mkdirs()
+		fileTree(project.buildDir).matching { include 'libs/*.jar' }.each { delete it }
+
+		baseJars.each { bundleJar ->
+			def sourceJarPath = baseDir.path + '/libs/' + bundleJar.name
+			println 'Initial bundle jar name [ ' + sourceJarPath + ' ]'
+
+			def finalBundleJarName = project.buildDir.path + '/libs/' + bundleJar.name.replaceAll( 'hibernate-core', 'hibernate-core-jakarta' )
+			println 'Default jakarta final bundle jar name [ ' + finalBundleJarName + ' ]'
+
+			def transformerArgs = [
+					sourceJarPath, finalBundleJarName,
+					'-q', // quiet output
+					'-tr', new File(getProjectDir().getParentFile(), 'rules/jakarta-renames.properties').path,
+					'-tv', new File(getProjectDir().getParentFile(), 'rules/jakarta-versions.properties').path,
+					'-td', new File(getProjectDir().getParentFile(), 'rules/jakarta-direct.properties').path,
+			]
+
+			println 'Transformer options:'
+			transformerArgs.each {
+				println ' [ ' + it + ' ]'
+			}
+
+			javaexec {
+				classpath configurations.jakartaeeTransformJars
+				main = 'org.eclipse.transformer.jakarta.JakartaTransformer'
+				args = transformerArgs
+			}
+		}
+	}
+}
+
+task unpackTestJar(type: Copy) {
+	dependsOn jar
+	fileTree(project.buildDir).matching { include 'libs/*-test.jar' }.each {
+		def outputDir = file("${buildDir}/unpacked/" + it.name)
+		from zipTree(it)
+		into outputDir
+	}
+}
+
+task copyBundleResources (type: Copy) {
+	dependsOn unpackTestJar
+	File unpackedDir = new File(project.buildDir, "libs/hibernate-core-jakarta-${project.version}-test.jar")
+	ext {
+		bundlesTargetDir = file( "${buildDir}/bundles" )
+		bundleTokens = dbBundle[db]
+		ext.bundleTokens['buildDirName'] = buildDir.absolutePath
+	}
+	from file("${buildDir}/unpacked/${unpackedDir.name}/templates")
+	into ext.bundlesTargetDir
+	// There are persistence.xml files referencing jar files through their absolute path so we
+	// have to replace 'hibernate-core/hibernate-core' in the path with 'hibernate-core/hibernate-core-jakarta'
+	filter { line ->
+		line.replaceAll( 'hibernate-core/hibernate-core', 'hibernate-core/hibernate-core-jakarta' )
+	}
+	doFirst {
+		ext.bundlesTargetDir.mkdirs()
+	}
+}
+
+processTestResources.dependsOn copyBundleResources
+
+artifacts {
+	tests new File(project.buildDir, "libs/hibernate-core-jakarta-${project.version}-test.jar")
+}
+
+test {
+	fileTree(project.buildDir).matching { include 'libs/*-test.jar' }.each {
+		def outputDir = file("${buildDir}/unpacked/" + it.name)
+		testClassesDirs += files(outputDir)
+		classpath += files(outputDir)
+	}
+	systemProperty 'file.encoding', 'utf-8'
+
+	if ( gradle.ext.javaVersions.test.launcher.asInt() >= 9 ) {
+		// See org.hibernate.boot.model.naming.NamingHelperTest.DefaultCharset.set
+		jvmArgs( ['--add-opens', 'java.base/java.nio.charset=ALL-UNNAMED'] )
+		// Weld needs this to generate proxies
+		jvmArgs( ['--add-opens', 'java.base/java.security=ALL-UNNAMED'] )
+		jvmArgs( ['--add-opens', 'java.base/java.lang=ALL-UNNAMED'] )
+	}
+}
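The jar task above shells out to the Eclipse Transformer to rewrite the hibernate-core jars into their jakarta counterparts. As a rough, hedged sketch only (not part of this commit, and the jar paths are hypothetical), the same CLI entry point configured above could be driven from Java, assuming the transformer artifacts from the jakartaeeTransformJars configuration are on the classpath:

    import org.eclipse.transformer.jakarta.JakartaTransformer;

    public class TransformJarSketch {
        public static void main(String[] args) throws Exception {
            // Mirrors the javaexec invocation in the Gradle script above; paths are hypothetical.
            JakartaTransformer.main( new String[] {
                    "build/libs/hibernate-core.jar",          // source jar
                    "build/libs/hibernate-core-jakarta.jar",  // target jar
                    "-q",
                    "-tr", "rules/jakarta-renames.properties",
                    "-tv", "rules/jakarta-versions.properties",
                    "-td", "rules/jakarta-direct.properties"
            } );
        }
    }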
@@ -39,7 +39,7 @@ sourceSets {
 	// resources inherently exclude sources
 	test {
 		resources {
-			setSrcDirs( ['src/test/java','src/test/resources'] )
+			setSrcDirs( ['src/test/java','src/test/resources','src/test/bundles'] )
 		}
 	}
 
@@ -136,6 +136,7 @@ jar {
 				// For JPA, we don't want to target the automatically generated range, but a specific version
 				"javax.persistence;version=\"${project.jpaVersion.osgiName}\"",
 				// optionals
+				'jakarta.persistence.spi;resolution:=optional',
 				'javax.management;resolution:=optional',
 				'javax.naming.event;resolution:=optional',
 				'javax.naming.spi;resolution:=optional',
@@ -177,7 +178,7 @@ sourceSets.main {
 
 // resources inherently exclude sources
 sourceSets.test.resources {
-	setSrcDirs( ['src/test/java','src/test/resources'] )
+	setSrcDirs( ['src/test/java','src/test/resources','src/test/bundles'] )
 }
 
 idea {
@@ -219,7 +220,7 @@ task copyBundleResources (type: Copy) {
 		ext.bundleTokens['buildDirName'] = buildDir.absolutePath
 	}
 
-	from file('src/test/bundles')
+	from file('src/test/bundles/templates')
 	into ext.bundlesTargetDir
 	filter( ReplaceTokens, tokens: ext.bundleTokens)
 
@@ -227,9 +228,12 @@ task copyBundleResources (type: Copy) {
 		ext.bundlesTargetDir.mkdirs()
 	}
 }
-processTestResources.dependsOn copyBundleResources
+// Not sure if this is a proper fix, but if we use dependsOn instead of finalizedBy,
+// we will end up a test JAR with bundle files that where variables are replaced
+processTestResources.finalizedBy copyBundleResources
 
 task testJar(type: Jar, dependsOn: testClasses) {
+	duplicatesStrategy = DuplicatesStrategy.EXCLUDE
 	archiveClassifier.set( 'test' )
 	from sourceSets.test.output
 }
 
@@ -8,8 +8,8 @@ package org.hibernate;
 
 import java.util.Iterator;
 
+import org.hibernate.bytecode.enhance.spi.interceptor.BytecodeLazyAttributeInterceptor;
 import org.hibernate.bytecode.enhance.spi.interceptor.EnhancementAsProxyLazinessInterceptor;
-import org.hibernate.bytecode.enhance.spi.interceptor.LazyAttributeLoadingInterceptor;
 import org.hibernate.collection.spi.PersistentCollection;
 import org.hibernate.engine.HibernateIterator;
 import org.hibernate.engine.jdbc.LobCreator;
@@ -202,11 +202,8 @@ public final class Hibernate {
 
 		if ( entity instanceof PersistentAttributeInterceptable ) {
 			PersistentAttributeInterceptor interceptor = ( (PersistentAttributeInterceptable) entity ).$$_hibernate_getInterceptor();
-			if ( interceptor instanceof EnhancementAsProxyLazinessInterceptor ) {
-				return false;
-			}
-			if ( interceptor instanceof LazyAttributeLoadingInterceptor ) {
-				return ( (LazyAttributeLoadingInterceptor) interceptor ).isAttributeLoaded( propertyName );
+			if ( interceptor instanceof BytecodeLazyAttributeInterceptor ) {
+				return ( (BytecodeLazyAttributeInterceptor) interceptor ).isAttributeLoaded( propertyName );
 			}
 		}
 
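The change above makes Hibernate.isPropertyInitialized() consult any BytecodeLazyAttributeInterceptor (which covers the enhanced-proxy case) instead of returning false outright. A minimal usage sketch of that public API, with a hypothetical entity and attribute name:

    import org.hibernate.Hibernate;

    public class LazyCheckSketch {
        // 'entity' would be a bytecode-enhanced instance; the attribute name is hypothetical.
        static boolean needsCustomerFetch(Object entity) {
            // With the change above, enhanced proxies report real per-attribute state via
            // BytecodeLazyAttributeInterceptor.isAttributeLoaded() instead of always "false".
            return !Hibernate.isPropertyInitialized( entity, "customer" );
        }
    }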
@@ -93,6 +93,11 @@ public class QueryHints {
 	 */
 	public static final String TIMEOUT_JPA = "javax.persistence.query.timeout";
 
+	/**
+	 * Apply a JPA query timeout, which is defined in <b>milliseconds</b>.
+	 */
+	public static final String TIMEOUT_JAKARTA_JPA = "jakarta.persistence.query.timeout";
+
 	/**
 	 * Available to apply lock mode to a native SQL query since JPA requires that
 	 * {@link javax.persistence.Query#setLockMode} throw an IllegalStateException if called for a native query.
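TIMEOUT_JAKARTA_JPA mirrors TIMEOUT_JPA under the jakarta prefix. A minimal sketch of passing it as a standard JPA query hint; the entity name is hypothetical, and whether the jakarta-prefixed key is honored depends on the support added in this commit:

    import java.util.List;

    import javax.persistence.EntityManager;
    import javax.persistence.Query;

    public class TimeoutHintSketch {
        static List<?> load(EntityManager em) {
            Query query = em.createQuery( "select a from Account a" );
            // Jakarta-prefixed variant of javax.persistence.query.timeout, in milliseconds
            query.setHint( "jakarta.persistence.query.timeout", 500 );
            return query.getResultList();
        }
    }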
@@ -85,6 +85,7 @@ public class ClassLoaderAccessImpl implements ClassLoaderAccess {
 		// classes in any of these packages are safe to load through the "live" ClassLoader
 		return name.startsWith( "java." )
 				|| name.startsWith( "javax." )
+				|| name.startsWith( "jakarta." )
 				|| name.startsWith( "org.hibernate." );
 
 	}
@@ -591,7 +591,7 @@ public class MetadataBuilderImpl implements MetadataBuilderImplementor, TypeCont
 		);
 
 		this.sharedCacheMode = configService.getSetting(
-				"javax.persistence.sharedCache.mode",
+				AvailableSettings.JPA_SHARED_CACHE_MODE,
 				new ConfigurationService.Converter<SharedCacheMode>() {
 					@Override
 					public SharedCacheMode convert(Object value) {
@@ -606,7 +606,24 @@ public class MetadataBuilderImpl implements MetadataBuilderImplementor, TypeCont
 						return SharedCacheMode.valueOf( value.toString() );
 					}
 				},
-				SharedCacheMode.UNSPECIFIED
+				configService.getSetting(
+						AvailableSettings.JAKARTA_JPA_SHARED_CACHE_MODE,
+						new ConfigurationService.Converter<SharedCacheMode>() {
+							@Override
+							public SharedCacheMode convert(Object value) {
+								if ( value == null ) {
+									return null;
+								}
+
+								if ( SharedCacheMode.class.isInstance( value ) ) {
+									return (SharedCacheMode) value;
+								}
+
+								return SharedCacheMode.valueOf( value.toString() );
+							}
+						},
+						SharedCacheMode.UNSPECIFIED
+				)
 		);
 
 		this.defaultCacheAccessType = configService.getSetting(
@@ -279,8 +279,14 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
 			( (ConfigurationServiceImpl) cfgService ).injectServices( (ServiceRegistryImplementor) serviceRegistry );
 		}
 
-		this.beanManagerReference = configurationSettings.get( "javax.persistence.bean.manager" );
-		this.validatorFactoryReference = configurationSettings.get( "javax.persistence.validation.factory" );
+		this.beanManagerReference = configurationSettings.getOrDefault(
+				AvailableSettings.CDI_BEAN_MANAGER,
+				configurationSettings.get( AvailableSettings.JAKARTA_CDI_BEAN_MANAGER )
+		);
+		this.validatorFactoryReference = configurationSettings.getOrDefault(
+				AvailableSettings.JPA_VALIDATION_FACTORY,
+				configurationSettings.get( AvailableSettings.JAKARTA_JPA_VALIDATION_FACTORY )
+		);
 
 		this.sessionFactoryName = (String) configurationSettings.get( SESSION_FACTORY_NAME );
 		this.sessionFactoryNameAlsoJndiName = cfgService.getSetting(
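The lookups above now resolve the legacy javax key first and fall back to the jakarta key via Map.getOrDefault. A self-contained sketch of that fallback pattern with plain strings (keys taken from this commit, values hypothetical):

    import java.util.HashMap;
    import java.util.Map;

    public class SettingFallbackSketch {
        // Resolve a setting by its javax key, falling back to the jakarta variant,
        // mirroring getOrDefault( primaryKey, settings.get( fallbackKey ) ) used above.
        static Object resolve(Map<String, Object> settings, String javaxKey, String jakartaKey) {
            return settings.getOrDefault( javaxKey, settings.get( jakartaKey ) );
        }

        public static void main(String[] args) {
            Map<String, Object> settings = new HashMap<>();
            settings.put( "jakarta.persistence.bean.manager", new Object() );
            // The javax key is absent, so the jakarta value wins.
            System.out.println( resolve( settings,
                    "javax.persistence.bean.manager", "jakarta.persistence.bean.manager" ) != null );
        }
    }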
@@ -15,6 +15,7 @@ import java.util.Set;
 import org.hibernate.LockMode;
 import org.hibernate.bytecode.enhance.spi.CollectionTracker;
 import org.hibernate.bytecode.enhance.spi.LazyPropertyInitializer;
+import org.hibernate.collection.spi.PersistentCollection;
 import org.hibernate.engine.spi.SelfDirtinessTracker;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
 import org.hibernate.engine.spi.Status;
@@ -151,11 +152,18 @@ public class LazyAttributeLoadingInterceptor extends AbstractLazyLoadInterceptor
 
 	private void takeCollectionSizeSnapshot(Object target, String fieldName, Object value) {
 		if ( value instanceof Collection && target instanceof SelfDirtinessTracker ) {
+			// This must be called first, so that we remember that there is a collection out there,
+			// even if we don't know its size (see below).
 			CollectionTracker tracker = ( (SelfDirtinessTracker) target ).$$_hibernate_getCollectionTracker();
 			if ( tracker == null ) {
 				( (SelfDirtinessTracker) target ).$$_hibernate_clearDirtyAttributes();
 				tracker = ( (SelfDirtinessTracker) target ).$$_hibernate_getCollectionTracker();
 			}
+
+			if ( value instanceof PersistentCollection && !( (PersistentCollection) value ).wasInitialized() ) {
+				// Cannot take a snapshot of an un-initialized collection.
+				return;
+			}
 			tracker.add( fieldName, ( (Collection) value ).size() );
 		}
 	}
@@ -7,6 +7,8 @@
 package org.hibernate.bytecode.internal.bytebuddy;
 
 import java.util.Collections;
+import java.lang.reflect.Constructor;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -29,6 +31,7 @@ public class BasicProxyFactoryImpl implements BasicProxyFactory {
 
 	private final Class proxyClass;
 	private final ProxyConfiguration.Interceptor interceptor;
+	private final Constructor proxyClassConstructor;
 
 	@SuppressWarnings({ "unchecked", "rawtypes" })
 	public BasicProxyFactoryImpl(Class superClass, Class[] interfaces, ByteBuddyState byteBuddyState) {
@@ -50,12 +53,18 @@ public class BasicProxyFactoryImpl implements BasicProxyFactory {
 						.intercept( byteBuddyState.getProxyDefinitionHelpers().getInterceptorFieldAccessor() )
 		);
 		this.interceptor = new PassThroughInterceptor( proxyClass.getName() );
+		try {
+			proxyClassConstructor = proxyClass.getConstructor();
+		}
+		catch (NoSuchMethodException e) {
+			throw new AssertionFailure( "Could not access default constructor from newly generated basic proxy" );
+		}
 	}
 
 	@Override
 	public Object getProxy() {
 		try {
-			final ProxyConfiguration proxy = (ProxyConfiguration) proxyClass.newInstance();
+			final ProxyConfiguration proxy = (ProxyConfiguration) proxyClassConstructor.newInstance();
 			proxy.$$_hibernate_set_interceptor( this.interceptor );
 			return proxy;
 		}
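The proxy factory now resolves the generated class's no-arg constructor once and reuses it, instead of calling Class.newInstance() per proxy. A standalone sketch of the same reflection pattern (class and exception types here are illustrative, not Hibernate's):

    import java.lang.reflect.Constructor;

    public class CachedConstructorSketch {
        private final Constructor<?> constructor;

        // Resolve the no-arg constructor once, as the patched BasicProxyFactoryImpl does;
        // a missing constructor now fails fast at definition time rather than per instantiation.
        public CachedConstructorSketch(Class<?> generatedClass) {
            try {
                this.constructor = generatedClass.getConstructor();
            }
            catch (NoSuchMethodException e) {
                throw new IllegalStateException( "Generated class has no default constructor", e );
            }
        }

        // Class.newInstance() is deprecated since Java 9 and rethrows checked constructor exceptions
        // undeclared; Constructor.newInstance() wraps them in InvocationTargetException instead.
        public Object newInstance() throws ReflectiveOperationException {
            return constructor.newInstance();
        }
    }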
@@ -146,6 +146,7 @@ import org.hibernate.cfg.annotations.Nullability;
 import org.hibernate.cfg.annotations.PropertyBinder;
 import org.hibernate.cfg.annotations.QueryBinder;
 import org.hibernate.cfg.annotations.TableBinder;
+import org.hibernate.cfg.internal.NullableDiscriminatorColumnSecondPass;
 import org.hibernate.engine.OptimisticLockStyle;
 import org.hibernate.engine.spi.FilterDefinition;
 import org.hibernate.id.PersistentIdentifierGenerator;
@@ -1556,6 +1557,8 @@ public final class AnnotationBinder {
 			if ( LOG.isTraceEnabled() ) {
 				LOG.tracev( "Setting discriminator for entity {0}", rootClass.getEntityName() );
 			}
+			context.getMetadataCollector().addSecondPass(
+					new NullableDiscriminatorColumnSecondPass( rootClass.getEntityName() ) );
 		}
 	}
 
@@ -213,6 +213,185 @@ public interface AvailableSettings extends org.hibernate.jpa.AvailableSettings {
 	String CDI_BEAN_MANAGER = "javax.persistence.bean.manager";
 
 
+	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+	// Jakarta JPA defined settings
+	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+	/**
+	 * The name of the {@link javax.persistence.spi.PersistenceProvider} implementor
+	 * <p/>
+	 * See JPA 2 sections 9.4.3 and 8.2.1.4
+	 */
+	String JAKARTA_JPA_PERSISTENCE_PROVIDER = "jakarta.persistence.provider";
+
+	/**
+	 * The type of transactions supported by the entity managers.
+	 * <p/>
+	 * See JPA 2 sections 9.4.3 and 8.2.1.2
+	 */
+	String JAKARTA_JPA_TRANSACTION_TYPE = "jakarta.persistence.transactionType";
+
+	/**
+	 * The JNDI name of a JTA {@link javax.sql.DataSource}.
+	 * <p/>
+	 * See JPA 2 sections 9.4.3 and 8.2.1.5
+	 */
+	String JAKARTA_JPA_JTA_DATASOURCE = "jakarta.persistence.jtaDataSource";
+
+	/**
+	 * The JNDI name of a non-JTA {@link javax.sql.DataSource}.
+	 * <p/>
+	 * See JPA 2 sections 9.4.3 and 8.2.1.5
+	 */
+	String JAKARTA_JPA_NON_JTA_DATASOURCE = "jakarta.persistence.nonJtaDataSource";
+
+	/**
+	 * The name of a JDBC driver to use to connect to the database.
+	 * <p/>
+	 * Used in conjunction with {@link #JPA_JDBC_URL}, {@link #JPA_JDBC_USER} and {@link #JPA_JDBC_PASSWORD}
+	 * to define how to make connections to the database in lieu of
+	 * a datasource (either {@link #JPA_JTA_DATASOURCE} or {@link #JPA_NON_JTA_DATASOURCE}).
+	 * <p/>
+	 * See section 8.2.1.9
+	 */
+	String JAKARTA_JPA_JDBC_DRIVER = "jakarta.persistence.jdbc.driver";
+
+	/**
+	 * The JDBC connection url to use to connect to the database.
+	 * <p/>
+	 * Used in conjunction with {@link #JPA_JDBC_DRIVER}, {@link #JPA_JDBC_USER} and {@link #JPA_JDBC_PASSWORD}
+	 * to define how to make connections to the database in lieu of
+	 * a datasource (either {@link #JPA_JTA_DATASOURCE} or {@link #JPA_NON_JTA_DATASOURCE}).
+	 * <p/>
+	 * See section 8.2.1.9
+	 */
+	String JAKARTA_JPA_JDBC_URL = "jakarta.persistence.jdbc.url";
+
+	/**
+	 * The JDBC connection user name.
+	 * <p/>
+	 * Used in conjunction with {@link #JPA_JDBC_DRIVER}, {@link #JPA_JDBC_URL} and {@link #JPA_JDBC_PASSWORD}
+	 * to define how to make connections to the database in lieu of
+	 * a datasource (either {@link #JPA_JTA_DATASOURCE} or {@link #JPA_NON_JTA_DATASOURCE}).
+	 * <p/>
+	 * See section 8.2.1.9
+	 */
+	String JAKARTA_JPA_JDBC_USER = "jakarta.persistence.jdbc.user";
+
+	/**
+	 * The JDBC connection password.
+	 * <p/>
+	 * Used in conjunction with {@link #JPA_JDBC_DRIVER}, {@link #JPA_JDBC_URL} and {@link #JPA_JDBC_USER}
+	 * to define how to make connections to the database in lieu of
+	 * a datasource (either {@link #JPA_JTA_DATASOURCE} or {@link #JPA_NON_JTA_DATASOURCE}).
+	 * <p/>
+	 * See JPA 2 section 8.2.1.9
+	 */
+	String JAKARTA_JPA_JDBC_PASSWORD = "jakarta.persistence.jdbc.password";
+
+	/**
+	 * Used to indicate whether second-level (what JPA terms shared cache) caching is
+	 * enabled as per the rules defined in JPA 2 section 3.1.7.
+	 * <p/>
+	 * See JPA 2 sections 9.4.3 and 8.2.1.7
+	 * @see javax.persistence.SharedCacheMode
+	 */
+	String JAKARTA_JPA_SHARED_CACHE_MODE = "jakarta.persistence.sharedCache.mode";
+
+	/**
+	 * NOTE : Not a valid EMF property...
+	 * <p/>
+	 * Used to indicate if the provider should attempt to retrieve requested data
+	 * in the shared cache.
+	 *
+	 * @see javax.persistence.CacheRetrieveMode
+	 */
+	String JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE ="jakarta.persistence.cache.retrieveMode";
+
+	/**
+	 * NOTE : Not a valid EMF property...
+	 * <p/>
+	 * Used to indicate if the provider should attempt to store data loaded from the database
+	 * in the shared cache.
+	 *
+	 * @see javax.persistence.CacheStoreMode
+	 */
+	String JAKARTA_JPA_SHARED_CACHE_STORE_MODE ="jakarta.persistence.cache.storeMode";
+
+	/**
+	 * Used to indicate what form of automatic validation is in effect as per rules defined
+	 * in JPA 2 section 3.6.1.1
+	 * <p/>
+	 * See JPA 2 sections 9.4.3 and 8.2.1.8
+	 * @see javax.persistence.ValidationMode
+	 */
+	String JAKARTA_JPA_VALIDATION_MODE = "jakarta.persistence.validation.mode";
+
+	/**
+	 * Used to pass along any discovered validator factory.
+	 */
+	String JAKARTA_JPA_VALIDATION_FACTORY = "jakarta.persistence.validation.factory";
+
+	/**
+	 * Used to coordinate with bean validators
+	 * <p/>
+	 * See JPA 2 section 8.2.1.9
+	 */
+	String JAKARTA_JPA_PERSIST_VALIDATION_GROUP = "jakarta.persistence.validation.group.pre-persist";
+
+	/**
+	 * Used to coordinate with bean validators
+	 * <p/>
+	 * See JPA 2 section 8.2.1.9
+	 */
+	String JAKARTA_JPA_UPDATE_VALIDATION_GROUP = "jakarta.persistence.validation.group.pre-update";
+
+	/**
+	 * Used to coordinate with bean validators
+	 * <p/>
+	 * See JPA 2 section 8.2.1.9
+	 */
+	String JAKARTA_JPA_REMOVE_VALIDATION_GROUP = "jakarta.persistence.validation.group.pre-remove";
+
+	/**
+	 * Used to request (hint) a pessimistic lock scope.
+	 * <p/>
+	 * See JPA 2 sections 8.2.1.9 and 3.4.4.3
+	 */
+	String JAKARTA_JPA_LOCK_SCOPE = "jakarta.persistence.lock.scope";
+
+	/**
+	 * Used to request (hint) a pessimistic lock timeout (in milliseconds).
+	 * <p/>
+	 * See JPA 2 sections 8.2.1.9 and 3.4.4.3
+	 */
+	String JAKARTA_JPA_LOCK_TIMEOUT = "jakarta.persistence.lock.timeout";
+
+	/**
+	 * Used to pass along the CDI BeanManager, if any, to be used.
+	 *
+	 * According to JPA, strictly, the BeanManager should be passed in
+	 * at boot-time and be ready for use at that time. However not all
+	 * environments can do this (WildFly e.g.). To accommodate such
+	 * environments, Hibernate provides 2 options:
+	 *
+	 *     * a proprietary CDI extension SPI (that we have proposed to
+	 *     the CDI spec group as a standard option) that can be used
+	 *     to provide delayed BeanManager access. To use this solution,
+	 *     the reference passed as the BeanManager during bootstrap
+	 *     should be typed as {@link ExtendedBeanManager}
+	 *     * delayed access to the BeanManager reference. Here, Hibernate
+	 *     will not access the reference passed as the BeanManager during
+	 *     bootstrap until it is first needed. Note however that this has
+	 *     the effect of delaying any deployment problems until after
+	 *     bootstrapping.
+	 *
+	 * This setting is used to configure Hibernate ORM's access to
+	 * the BeanManager (either directly or via {@link ExtendedBeanManager}).
+	 */
+	String JAKARTA_CDI_BEAN_MANAGER = "jakarta.persistence.bean.manager";
+
+
 	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 	// BootstrapServiceRegistry level settings
 	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -1581,6 +1760,185 @@ public interface AvailableSettings extends org.hibernate.jpa.AvailableSettings {
|
||||||
*/
|
*/
|
||||||
String HBM2DDL_CREATE_SCHEMAS = "javax.persistence.create-database-schemas";
|
String HBM2DDL_CREATE_SCHEMAS = "javax.persistence.create-database-schemas";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setting to perform SchemaManagementTool actions against the database directly via JDBC
|
||||||
|
* automatically as part of the SessionFactory lifecycle. Valid options are defined by the
|
||||||
|
* {@link org.hibernate.tool.schema.Action} enum.
|
||||||
|
* <p/>
|
||||||
|
* Interpreted in combination with {@link #HBM2DDL_AUTO}. If no value is specified, the default
|
||||||
|
* is "none" ({@link org.hibernate.tool.schema.Action#NONE}).
|
||||||
|
*
|
||||||
|
* @see org.hibernate.tool.schema.Action
|
||||||
|
*/
|
||||||
|
String JAKARTA_HBM2DDL_DATABASE_ACTION = "jakarta.persistence.schema-generation.database.action";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setting to perform SchemaManagementTool actions writing the commands into a DDL script file.
|
||||||
|
* Valid options are defined by the {@link org.hibernate.tool.schema.Action} enum.
|
||||||
|
* <p/>
|
||||||
|
* Interpreted in combination with {@link #HBM2DDL_AUTO}. If no value is specified, the default
|
||||||
|
* is "none" ({@link org.hibernate.tool.schema.Action#NONE}).
|
||||||
|
*
|
||||||
|
* @see org.hibernate.tool.schema.Action
|
||||||
|
*/
|
||||||
|
String JAKARTA_HBM2DDL_SCRIPTS_ACTION = "jakarta.persistence.schema-generation.scripts.action";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Allows passing a specific {@link java.sql.Connection} instance to be used by SchemaManagementTool.
|
||||||
|
* <p/>
|
||||||
|
* May also be used to determine the values for {@value #HBM2DDL_DB_NAME},
|
||||||
|
* {@value #HBM2DDL_DB_MAJOR_VERSION} and {@value #HBM2DDL_DB_MINOR_VERSION}.
|
||||||
|
*/
|
||||||
|
String JAKARTA_HBM2DDL_CONNECTION = "jakarta.persistence.schema-generation-connection";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the name of the database provider in cases where a Connection to the underlying database is
|
||||||
|
* not available (aka, mainly in generating scripts). In such cases, a value for this setting
|
||||||
|
* *must* be specified.
|
||||||
|
* <p/>
|
||||||
|
* The value of this setting is expected to match the value returned by
|
||||||
|
* {@link java.sql.DatabaseMetaData#getDatabaseProductName()} for the target database.
|
||||||
|
* <p/>
|
||||||
|
* Additionally specifying {@value #HBM2DDL_DB_MAJOR_VERSION} and/or {@value #HBM2DDL_DB_MINOR_VERSION}
|
||||||
|
* may be required to understand exactly how to generate the required schema commands.
|
||||||
|
*
|
||||||
|
* @see #HBM2DDL_DB_MAJOR_VERSION
|
||||||
|
* @see #HBM2DDL_DB_MINOR_VERSION
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("JavaDoc")
|
||||||
|
String JAKARTA_HBM2DDL_DB_NAME = "jakarta.persistence.database-product-name";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the name of the database provider in cases where a Connection to the underlying database is
|
||||||
|
* not available (aka, mainly in generating scripts). This value is used to help more precisely determine
|
||||||
|
* how to perform schema generation tasks for the underlying database in cases where
|
||||||
|
* {@value #DIALECT_DB_NAME} does not provide enough distinction.
|
||||||
|
* <p/>
|
||||||
|
* The value of this setting is expected to match the value returned by
|
||||||
|
* {@link java.sql.DatabaseMetaData#getDatabaseProductVersion()} for the target database.
|
||||||
|
*
|
||||||
|
* @see #DIALECT_DB_NAME
|
||||||
|
*/
|
||||||
|
String JAKARTA_DIALECT_DB_VERSION = "javax.persistence.database-product-version";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the major version of the underlying database, as would be returned by
|
||||||
|
* {@link java.sql.DatabaseMetaData#getDatabaseMajorVersion} for the target database. This value is used to
|
||||||
|
* help more precisely determine how to perform schema generation tasks for the underlying database in cases
|
||||||
|
* where {@value #HBM2DDL_DB_NAME} does not provide enough distinction.
|
||||||
|
|
||||||
|
* @see #HBM2DDL_DB_NAME
|
||||||
|
* @see #HBM2DDL_DB_MINOR_VERSION
|
||||||
|
*/
|
||||||
|
String JAKARTA_HBM2DDL_DB_MAJOR_VERSION = "jakarta.persistence.database-major-version";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the minor version of the underlying database, as would be returned by
|
||||||
|
* {@link java.sql.DatabaseMetaData#getDatabaseMinorVersion} for the target database. This value is used to
|
||||||
|
* help more precisely determine how to perform schema generation tasks for the underlying database in cases
|
||||||
|
* where the combination of {@value #HBM2DDL_DB_NAME} and {@value #HBM2DDL_DB_MAJOR_VERSION} does not provide
|
||||||
|
* enough distinction.
|
||||||
|
*
|
||||||
|
* @see #HBM2DDL_DB_NAME
|
||||||
|
* @see #HBM2DDL_DB_MAJOR_VERSION
|
||||||
|
*/
|
||||||
|
String JAKARTA_HBM2DDL_DB_MINOR_VERSION = "jakarta.persistence.database-minor-version";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies whether schema generation commands for schema creation are to be determined based on object/relational
|
||||||
|
* mapping metadata, DDL scripts, or a combination of the two. See {@link SourceType} for valid set of values.
|
||||||
|
* If no value is specified, a default is assumed as follows:<ul>
|
||||||
|
* <li>
|
||||||
|
* if source scripts are specified (per {@value #HBM2DDL_CREATE_SCRIPT_SOURCE}),then "scripts" is assumed
|
||||||
|
* </li>
|
||||||
|
* <li>
|
||||||
|
* otherwise, "metadata" is assumed
|
||||||
|
* </li>
|
||||||
|
* </ul>
|
||||||
|
*
|
||||||
|
* @see SourceType
|
||||||
|
*/
|
||||||
|
String JAKARTA_HBM2DDL_CREATE_SOURCE = "jakarta.persistence.schema-generation.create-source";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies whether schema generation commands for schema dropping are to be determined based on object/relational
|
||||||
|
* mapping metadata, DDL scripts, or a combination of the two. See {@link SourceType} for valid set of values.
|
||||||
|
+ * If no value is specified, a default is assumed as follows:<ul>
+ * <li>
+ * if source scripts are specified (per {@value #HBM2DDL_DROP_SCRIPT_SOURCE}),then "scripts" is assumed
+ * </li>
+ * <li>
+ * otherwise, "metadata" is assumed
+ * </li>
+ * </ul>
+ *
+ * @see SourceType
+ */
+String JAKARTA_HBM2DDL_DROP_SOURCE = "jakarta.persistence.schema-generation.drop-source";

+/**
+ * Specifies the CREATE script file as either a {@link java.io.Reader} configured for reading of the DDL script
+ * file or a string designating a file {@link java.net.URL} for the DDL script.
+ * <p/>
+ * Hibernate historically also accepted {@link #HBM2DDL_IMPORT_FILES} for a similar purpose. This setting
+ * should be preferred over {@link #HBM2DDL_IMPORT_FILES} moving forward
+ *
+ * @see #HBM2DDL_CREATE_SOURCE
+ * @see #HBM2DDL_IMPORT_FILES
+ */
+String JAKARTA_HBM2DDL_CREATE_SCRIPT_SOURCE = "jakarta.persistence.schema-generation.create-script-source";

+/**
+ * Specifies the DROP script file as either a {@link java.io.Reader} configured for reading of the DDL script
+ * file or a string designating a file {@link java.net.URL} for the DDL script.
+ *
+ * @see #HBM2DDL_DROP_SOURCE
+ */
+String JAKARTA_HBM2DDL_DROP_SCRIPT_SOURCE = "jakarta.persistence.schema-generation.drop-script-source";

+/**
+ * For cases where the {@value #HBM2DDL_SCRIPTS_ACTION} value indicates that schema creation commands should
+ * be written to DDL script file, {@value #HBM2DDL_SCRIPTS_CREATE_TARGET} specifies either a
+ * {@link java.io.Writer} configured for output of the DDL script or a string specifying the file URL for the DDL
+ * script.
+ *
+ * @see #HBM2DDL_SCRIPTS_ACTION
+ */
+@SuppressWarnings("JavaDoc")
+String JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET = "jakarta.persistence.schema-generation.scripts.create-target";

+/**
+ * For cases where the {@value #HBM2DDL_SCRIPTS_ACTION} value indicates that schema drop commands should
+ * be written to DDL script file, {@value #HBM2DDL_SCRIPTS_DROP_TARGET} specifies either a
+ * {@link java.io.Writer} configured for output of the DDL script or a string specifying the file URL for the DDL
+ * script.
+ *
+ * @see #HBM2DDL_SCRIPTS_ACTION
+ */
+@SuppressWarnings("JavaDoc")
+String JAKARTA_HBM2DDL_SCRIPTS_DROP_TARGET = "jakarta.persistence.schema-generation.scripts.drop-target";

+/**
+ * JPA variant of {@link #HBM2DDL_IMPORT_FILES}
+ * <p/>
+ * Specifies a {@link java.io.Reader} configured for reading of the SQL load script or a string designating the
+ * file {@link java.net.URL} for the SQL load script.
+ * <p/>
+ * A "SQL load script" is a script that performs some database initialization (INSERT, etc).
+ */
+String JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE = "jakarta.persistence.sql-load-script-source";

+/**
+ * The JPA variant of {@link #HBM2DDL_CREATE_NAMESPACES}
+ * <p/>
+ * Specifies whether the persistence provider is to create the database schema(s) in addition to creating
+ * database objects (tables, sequences, constraints, etc). The value of this boolean property should be set
+ * to {@code true} if the persistence provider is to create schemas in the database or to generate DDL that
+ * contains "CREATE SCHEMA" commands. If this property is not supplied (or is explicitly {@code false}), the
+ * provider should not attempt to create database schemas.
+ */
+String JAKARTA_HBM2DDL_CREATE_SCHEMAS = "jakarta.persistence.create-database-schemas";

 /**
  * @deprecated Use {@link #HBM2DDL_CREATE_SCHEMAS} instead: this variable name had a typo.
  */
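The jakarta.persistence.schema-generation.* keys added above are plain configuration properties. As a rough, hedged sketch of how an application might supply them at bootstrap (the file paths and the idea of handing the map to Persistence.createEntityManagerFactory are illustrative assumptions, not taken from this commit):

import java.util.HashMap;
import java.util.Map;

public class JakartaSchemaGenSettingsExample {
    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        // "metadata" is the default named in the Javadoc above when no scripts are configured.
        props.put("jakarta.persistence.schema-generation.drop-source", "metadata");
        // Hypothetical script paths for the script-based settings.
        props.put("jakarta.persistence.schema-generation.create-script-source", "db/create.sql");
        props.put("jakarta.persistence.schema-generation.scripts.create-target", "target/generated-create.sql");
        props.put("jakarta.persistence.sql-load-script-source", "db/load.sql");
        // Ask the provider to also emit CREATE SCHEMA commands.
        props.put("jakarta.persistence.create-database-schemas", "true");
        // In a real application this map would typically be passed to the persistence provider,
        // e.g. Persistence.createEntityManagerFactory("my-unit", props).
        props.forEach((k, v) -> System.out.println(k + " = " + v));
    }
}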
@@ -67,6 +67,7 @@ import org.hibernate.boot.spi.MetadataBuildingContext;
 import org.hibernate.cfg.AccessType;
 import org.hibernate.cfg.AnnotationBinder;
 import org.hibernate.cfg.BinderHelper;
+import org.hibernate.cfg.Ejb3DiscriminatorColumn;
 import org.hibernate.cfg.Ejb3JoinColumn;
 import org.hibernate.cfg.InheritanceState;
 import org.hibernate.cfg.ObjectNameSource;
@@ -20,6 +20,7 @@ import org.hibernate.LockMode;
 import org.hibernate.LockOptions;
 import org.hibernate.MappingException;
 import org.hibernate.annotations.QueryHints;
+import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.internal.util.LockModeConverter;
 import org.hibernate.internal.util.config.ConfigurationHelper;

@@ -132,7 +133,10 @@ public class QueryHintDefinition {

 public LockOptions determineLockOptions(NamedQuery namedQueryAnnotation) {
 LockModeType lockModeType = namedQueryAnnotation.lockMode();
-Integer lockTimeoutHint = getInteger( "javax.persistence.lock.timeout" );
+Integer lockTimeoutHint = getInteger( AvailableSettings.JPA_LOCK_TIMEOUT );
+if ( lockTimeoutHint == null ) {
+lockTimeoutHint = getInteger( AvailableSettings.JAKARTA_JPA_LOCK_TIMEOUT );
+}
 Boolean followOnLocking = getBoolean( QueryHints.FOLLOW_ON_LOCKING );

 return determineLockOptions(lockModeType, lockTimeoutHint, followOnLocking);
@@ -33,8 +33,10 @@ public class BeanValidationIntegrator implements Integrator {
 public static final String APPLY_CONSTRAINTS = "hibernate.validator.apply_to_ddl";

 public static final String BV_CHECK_CLASS = "javax.validation.ConstraintViolation";
+public static final String JAKARTA_BV_CHECK_CLASS = "jakarta.validation.ConstraintViolation";

 public static final String MODE_PROPERTY = "javax.persistence.validation.mode";
+public static final String JAKARTA_MODE_PROPERTY = "jakarta.persistence.validation.mode";

 private static final String ACTIVATOR_CLASS_NAME = "org.hibernate.cfg.beanvalidation.TypeSafeActivator";
 private static final String VALIDATE_SUPPLIED_FACTORY_METHOD_NAME = "validateSuppliedFactory";

@@ -87,7 +89,11 @@ public class BeanValidationIntegrator implements Integrator {
 final SessionFactoryServiceRegistry serviceRegistry) {
 final ConfigurationService cfgService = serviceRegistry.getService( ConfigurationService.class );
 // IMPL NOTE : see the comments on ActivationContext.getValidationModes() as to why this is multi-valued...
-final Set<ValidationMode> modes = ValidationMode.getModes( cfgService.getSettings().get( MODE_PROPERTY ) );
+Object modeSetting = cfgService.getSettings().get( MODE_PROPERTY );
+if ( modeSetting == null ) {
+modeSetting = cfgService.getSettings().get( JAKARTA_MODE_PROPERTY );
+}
+final Set<ValidationMode> modes = ValidationMode.getModes( modeSetting );
 if ( modes.size() > 1 ) {
 LOG.multipleValidationModes( ValidationMode.loggable( modes ) );
 }

@@ -157,7 +163,13 @@ public class BeanValidationIntegrator implements Integrator {
 return true;
 }
 catch (Exception e) {
-return false;
+try {
+classLoaderService.classForName( JAKARTA_BV_CHECK_CLASS );
+return true;
+}
+catch (Exception e2) {
+return false;
+}
 }
 }
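The availability check above now probes for the legacy javax Bean Validation API first and falls back to the jakarta one. A minimal standalone sketch of the same probe, using plain Class.forName instead of Hibernate's ClassLoaderService (the class names are the BV_CHECK_CLASS / JAKARTA_BV_CHECK_CLASS values shown above):

public class BeanValidationProbe {
    private static final String JAVAX_CLASS = "javax.validation.ConstraintViolation";
    private static final String JAKARTA_CLASS = "jakarta.validation.ConstraintViolation";

    static boolean isBeanValidationOnClasspath() {
        try {
            Class.forName(JAVAX_CLASS);       // legacy javax API present?
            return true;
        }
        catch (Exception e) {
            try {
                Class.forName(JAKARTA_CLASS); // otherwise accept the jakarta API
                return true;
            }
            catch (Exception e2) {
                return false;
            }
        }
    }

    public static void main(String[] args) {
        System.out.println("Bean Validation available: " + isBeanValidationOnClasspath());
    }
}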
@@ -22,6 +22,7 @@ import org.hibernate.internal.util.collections.CollectionHelper;
 */
 public class GroupsPerOperation {
 private static final String JPA_GROUP_PREFIX = "javax.persistence.validation.group.";
+private static final String JAKARTA_JPA_GROUP_PREFIX = "javax.persistence.validation.group.";
 private static final String HIBERNATE_GROUP_PREFIX = "org.hibernate.validator.group.";

 private static final Class<?>[] DEFAULT_GROUPS = new Class<?>[] { Default.class };

@@ -55,7 +56,10 @@ public class GroupsPerOperation {
 }

 public static Class<?>[] buildGroupsForOperation(Operation operation, Map settings, ClassLoaderAccess classLoaderAccess) {
-final Object property = settings.get( operation.getGroupPropertyName() );
+Object property = settings.get( operation.getGroupPropertyName() );
+if ( property == null ) {
+property = settings.get( operation.getJakartaGroupPropertyName() );
+}

 if ( property == null ) {
 return operation == Operation.DELETE ? EMPTY_GROUPS : DEFAULT_GROUPS;

@@ -96,18 +100,20 @@ public class GroupsPerOperation {
 }

 public static enum Operation {
-INSERT("persist", JPA_GROUP_PREFIX + "pre-persist"),
+INSERT( "persist", JPA_GROUP_PREFIX + "pre-persist", JAKARTA_JPA_GROUP_PREFIX + "pre-persist" ),
-UPDATE("update", JPA_GROUP_PREFIX + "pre-update"),
+UPDATE( "update", JPA_GROUP_PREFIX + "pre-update", JAKARTA_JPA_GROUP_PREFIX + "pre-update" ),
-DELETE("remove", JPA_GROUP_PREFIX + "pre-remove"),
+DELETE( "remove", JPA_GROUP_PREFIX + "pre-remove", JAKARTA_JPA_GROUP_PREFIX + "pre-remove" ),
-DDL("ddl", HIBERNATE_GROUP_PREFIX + "ddl");
+DDL( "ddl", HIBERNATE_GROUP_PREFIX + "ddl", HIBERNATE_GROUP_PREFIX + "ddl" );

 private final String exposedName;
 private final String groupPropertyName;
+private final String jakartaGroupPropertyName;

-Operation(String exposedName, String groupProperty) {
+Operation(String exposedName, String groupProperty, String jakartaGroupPropertyName) {
 this.exposedName = exposedName;
 this.groupPropertyName = groupProperty;
+this.jakartaGroupPropertyName = jakartaGroupPropertyName;
 }

 public String getName() {

@@ -117,6 +123,10 @@ public class GroupsPerOperation {
 public String getGroupPropertyName() {
 return groupPropertyName;
 }

+public String getJakartaGroupPropertyName() {
+return jakartaGroupPropertyName;
+}
 }

 }
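Each lifecycle operation now exposes a second property key for the jakarta-style lookup. As a small, hedged sketch of the key names a configuration map would use (prefixes are the constants shown above; note that the diff assigns the "jakarta" constant the same javax.* value, so both keys are currently identical, and the group class names in a real value would be application-specific):

public class ValidationGroupKeys {
    private static final String JPA_GROUP_PREFIX = "javax.persistence.validation.group.";
    private static final String HIBERNATE_GROUP_PREFIX = "org.hibernate.validator.group.";

    public static void main(String[] args) {
        String[][] operations = {
                { "persist", JPA_GROUP_PREFIX + "pre-persist" },
                { "update",  JPA_GROUP_PREFIX + "pre-update" },
                { "remove",  JPA_GROUP_PREFIX + "pre-remove" },
                { "ddl",     HIBERNATE_GROUP_PREFIX + "ddl" }
        };
        for (String[] op : operations) {
            // A settings map would use op[1] as the key and a list of validation
            // group classes as the value.
            System.out.println(op[0] + " -> " + op[1]);
        }
    }
}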
@@ -33,6 +33,7 @@ import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
 import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
 import org.hibernate.boot.spi.ClassLoaderAccess;
 import org.hibernate.boot.spi.SessionFactoryOptions;
+import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.cfg.Environment;
 import org.hibernate.dialect.Dialect;
 import org.hibernate.engine.config.spi.ConfigurationService;

@@ -60,8 +61,6 @@ class TypeSafeActivator {

 private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, TypeSafeActivator.class.getName());

-private static final String FACTORY_PROPERTY = "javax.persistence.validation.factory";
-
 /**
 * Used to validate a supplied ValidatorFactory instance as being castable to ValidatorFactory.
 *

@@ -532,7 +531,7 @@ class TypeSafeActivator {
 @SuppressWarnings("unchecked")
 private static ValidatorFactory resolveProvidedFactory(ConfigurationService cfgService) {
 return cfgService.getSetting(
-FACTORY_PROPERTY,
+AvailableSettings.JPA_VALIDATION_FACTORY,
 new ConfigurationService.Converter<ValidatorFactory>() {
 @Override
 public ValidatorFactory convert(Object value) {

@@ -544,7 +543,7 @@ class TypeSafeActivator {
 String.format(
 Locale.ENGLISH,
 "ValidatorFactory reference (provided via `%s` setting) was not castable to %s : %s",
-FACTORY_PROPERTY,
+AvailableSettings.JPA_VALIDATION_FACTORY,
 ValidatorFactory.class.getName(),
 value.getClass().getName()
 )

@@ -552,7 +551,29 @@ class TypeSafeActivator {
 }
 }
 },
-null
+cfgService.getSetting(
+AvailableSettings.JAKARTA_JPA_VALIDATION_FACTORY,
+new ConfigurationService.Converter<ValidatorFactory>() {
+@Override
+public ValidatorFactory convert(Object value) {
+try {
+return ValidatorFactory.class.cast( value );
+}
+catch ( ClassCastException e ) {
+throw new IntegrationException(
+String.format(
+Locale.ENGLISH,
+"ValidatorFactory reference (provided via `%s` setting) was not castable to %s : %s",
+AvailableSettings.JAKARTA_JPA_VALIDATION_FACTORY,
+ValidatorFactory.class.getName(),
+value.getClass().getName()
+)
+);
+}
+}
+},
+null
+)
 );
 }
 }
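This change, like several others in the merge, follows the same "read the javax.* setting, fall back to the jakarta.* one" shape. A generic standalone sketch of that lookup; the jakarta-prefixed key string below is my assumption of what AvailableSettings.JAKARTA_JPA_VALIDATION_FACTORY resolves to, and the stored value is a placeholder:

import java.util.HashMap;
import java.util.Map;

public class DualKeySettings {
    /** Returns the value under legacyKey if present, otherwise the value under jakartaKey. */
    @SuppressWarnings("unchecked")
    static <T> T getSetting(Map<String, Object> settings, String legacyKey, String jakartaKey) {
        Object value = settings.get(legacyKey);
        if (value == null) {
            value = settings.get(jakartaKey);
        }
        return (T) value;
    }

    public static void main(String[] args) {
        Map<String, Object> settings = new HashMap<>();
        // Only the jakarta-prefixed key is set, so the fallback branch is taken.
        settings.put("jakarta.persistence.validation.factory", "someValidatorFactoryInstance");
        Object factory = getSetting(settings,
                "javax.persistence.validation.factory",
                "jakarta.persistence.validation.factory");
        System.out.println("resolved validation factory setting: " + factory);
    }
}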
@@ -0,0 +1,54 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.cfg.internal;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.hibernate.MappingException;
+import org.hibernate.cfg.SecondPass;
+import org.hibernate.mapping.Column;
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.mapping.Selectable;
+import org.hibernate.mapping.Subclass;
+
+public class NullableDiscriminatorColumnSecondPass implements SecondPass {
+private final String rootEntityName;
+
+public NullableDiscriminatorColumnSecondPass(String rootEntityName) {
+this.rootEntityName = rootEntityName;
+}
+
+@Override
+@SuppressWarnings("rawtypes")
+public void doSecondPass(Map persistentClasses) throws MappingException {
+PersistentClass rootPersistenceClass = (PersistentClass) persistentClasses.get( rootEntityName );
+if ( hasNullDiscriminatorValue( rootPersistenceClass ) ) {
+for ( Iterator<Selectable> iterator = rootPersistenceClass.getDiscriminator().getColumnIterator();
+iterator.hasNext(); ) {
+Selectable selectable = iterator.next();
+if ( selectable instanceof Column ) {
+( (Column) selectable ).setNullable( true );
+}
+}
+}
+}
+
+@SuppressWarnings({ "unchecked" })
+private boolean hasNullDiscriminatorValue(PersistentClass rootPersistenceClass) {
+if ( rootPersistenceClass.isDiscriminatorValueNull() ) {
+return true;
+}
+Iterator<Subclass> subclassIterator = rootPersistenceClass.getSubclassIterator();
+while ( subclassIterator.hasNext() ) {
+if ( subclassIterator.next().isDiscriminatorValueNull() ) {
+return true;
+}
+}
+return false;
+}
+}
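The new second pass only acts when the root entity or one of its subclasses maps the null discriminator value. A hedged sketch of such a hierarchy, assuming Hibernate's convention that @DiscriminatorValue("null") marks the null discriminator (entity names are made up, and javax.persistence imports are used to match the surrounding code):

import javax.persistence.DiscriminatorColumn;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;

// Single-table inheritance where rows with a NULL discriminator map to the root type.
// For a mapping like this, NullableDiscriminatorColumnSecondPass marks the DTYPE column nullable.
@Entity
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@DiscriminatorColumn(name = "DTYPE")
@DiscriminatorValue("null") // assumed convention for the null discriminator value
class Payment {
    @Id
    Long id;
}

@Entity
@DiscriminatorValue("CARD")
class CardPayment extends Payment {
    String cardNumber;
}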
@@ -331,9 +331,7 @@ public abstract class AbstractEntityEntry implements Serializable, EntityEntry {
 if ( enhancementAsProxyLazinessInterceptor.hasWrittenFieldNames() ) {
 return false;
 }
-// When a proxy has dirty attributes, we have to treat it like a normal entity to flush changes
-return !enhancementAsProxyLazinessInterceptor.isInitialized()
-|| !persister.hasCollections() && !( (SelfDirtinessTracker) entity ).$$_hibernate_hasDirtyAttributes();
+return true;
 }
 }
 else if ( entity instanceof HibernateProxy ) {
@@ -101,28 +101,6 @@ public final class TwoPhaseLoad {
 }
 }

-/**
- * @deprecated This method will be removed. Use {@link #initializeEntity(Object, boolean, SharedSessionContractImplementor, PreLoadEvent, Iterable)} instead.
- *
- * @param entity The entity being loaded
- * @param readOnly Is the entity being loaded as read-only
- * @param session The Session
- * @param preLoadEvent The (re-used) pre-load event
- */
-@Deprecated
-public static void initializeEntity(
-final Object entity,
-final boolean readOnly,
-final SharedSessionContractImplementor session,
-final PreLoadEvent preLoadEvent) {
-final EventListenerGroup<PreLoadEventListener> listenerGroup = session
-.getFactory()
-.getFastSessionServices()
-.eventListenerGroup_PRE_LOAD;
-final Iterable<PreLoadEventListener> listeners = listenerGroup.listeners();
-initializeEntity( entity, readOnly, session, preLoadEvent, listeners, EntityResolver.DEFAULT );
-}
-
 /**
 * Perform the second step of 2-phase load. Fully initialize the entity
 * instance.

@@ -135,15 +113,13 @@
 * @param readOnly Is the entity being loaded as read-only
 * @param session The Session
 * @param preLoadEvent The (re-used) pre-load event
-* @param preLoadEventListeners the pre-load event listeners
 */
 public static void initializeEntity(
 final Object entity,
 final boolean readOnly,
 final SharedSessionContractImplementor session,
-final PreLoadEvent preLoadEvent,
-final Iterable<PreLoadEventListener> preLoadEventListeners) {
-initializeEntity( entity, readOnly, session, preLoadEvent, preLoadEventListeners, EntityResolver.DEFAULT );
+final PreLoadEvent preLoadEvent) {
+initializeEntity( entity, readOnly, session, preLoadEvent, EntityResolver.DEFAULT );
 }

 /**

@@ -158,7 +134,6 @@
 * @param readOnly Is the entity being loaded as read-only
 * @param session The Session
 * @param preLoadEvent The (re-used) pre-load event
-* @param preLoadEventListeners the pre-load event listeners
 * @param entityResolver the resolver used for to-one entity associations
 * (not used when an entity is a bytecode-enhanced lazy entity)
 */

@@ -167,7 +142,6 @@
 final boolean readOnly,
 final SharedSessionContractImplementor session,
 final PreLoadEvent preLoadEvent,
-final Iterable<PreLoadEventListener> preLoadEventListeners,
 final EntityResolver entityResolver) {
 final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
 final EntityEntry entityEntry = persistenceContext.getEntry( entity );

@@ -175,7 +149,7 @@
 throw new AssertionFailure( "possible non-threadsafe access to the session" );
 }
 initializeEntityEntryLoadedState( entity, entityEntry, session, entityResolver );
-initializeEntityFromEntityEntryLoadedState( entity, entityEntry, readOnly, session, preLoadEvent, preLoadEventListeners );
+initializeEntityFromEntityEntryLoadedState( entity, entityEntry, readOnly, session, preLoadEvent );
 }

 public static void initializeEntityEntryLoadedState(

@@ -255,8 +229,7 @@
 final EntityEntry entityEntry,
 final boolean readOnly,
 final SharedSessionContractImplementor session,
-final PreLoadEvent preLoadEvent,
-final Iterable<PreLoadEventListener> preLoadEventListeners) throws HibernateException {
+final PreLoadEvent preLoadEvent) throws HibernateException {

 final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
 final EntityPersister persister = entityEntry.getPersister();

@@ -268,9 +241,8 @@
 //Must occur after resolving identifiers!
 if ( session.isEventSource() ) {
 preLoadEvent.setEntity( entity ).setState( hydratedState ).setId( id ).setPersister( persister );
-for ( PreLoadEventListener listener : preLoadEventListeners ) {
-listener.onPreLoad( preLoadEvent );
-}
+session.getFactory().getFastSessionServices()
+.eventListenerGroup_PRE_LOAD.fireEventOnEachListener( preLoadEvent, PreLoadEventListener::onPreLoad );
 }

 persister.setPropertyValues( entity, hydratedState );

@@ -583,7 +555,7 @@
 /**
 * Implementations determine how a to-one associations is resolved.
 *
-* @see #initializeEntity(Object, boolean, SharedSessionContractImplementor, PreLoadEvent, Iterable, EntityResolver)
+* @see #initializeEntity(Object, boolean, SharedSessionContractImplementor, PreLoadEvent, EntityResolver)
 */
 public interface EntityResolver {
@@ -113,8 +113,14 @@ public class EffectiveEntityGraph implements AppliedGraph, Serializable {
 return;
 }

-final RootGraphImplementor fetchHint = (RootGraphImplementor) properties.get( GraphSemantic.FETCH.getJpaHintName() );
-final RootGraphImplementor loadHint = (RootGraphImplementor) properties.get( GraphSemantic.LOAD.getJpaHintName() );
+RootGraphImplementor fetchHint = (RootGraphImplementor) properties.get( GraphSemantic.FETCH.getJpaHintName() );
+RootGraphImplementor loadHint = (RootGraphImplementor) properties.get( GraphSemantic.LOAD.getJpaHintName() );
+if (fetchHint == null) {
+fetchHint = (RootGraphImplementor) properties.get( GraphSemantic.FETCH.getJakartaJpaHintName() );
+}
+if (loadHint == null) {
+loadHint = (RootGraphImplementor) properties.get( GraphSemantic.LOAD.getJakartaJpaHintName() );
+}

 if ( fetchHint == null && loadHint == null ) {
 log.debugf( "Neither LOAD nor FETCH graph were found in properties" );
@@ -104,8 +104,8 @@ public abstract class AbstractFlushingEventListener implements JpaBootstrapSensi
 logFlushResults( event );
 }

-@SuppressWarnings( value = {"unchecked"} )
+@SuppressWarnings("unchecked")
-private void logFlushResults(FlushEvent event) {
+protected void logFlushResults(FlushEvent event) {
 if ( !LOG.isDebugEnabled() ) {
 return;
 }

@@ -219,7 +219,6 @@
 final int count = entityEntries.length;

 for ( Map.Entry<Object,EntityEntry> me : entityEntries ) {
-
 // Update the status of the object and if necessary, schedule an update

 EntityEntry entry = me.getValue();
@@ -454,6 +454,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
 private boolean hasDirtyCollections(FlushEntityEvent event, EntityPersister persister, Status status) {
 if ( isCollectionDirtyCheckNecessary( persister, status ) ) {
 DirtyCollectionSearchVisitor visitor = new DirtyCollectionSearchVisitor(
+event.getEntity(),
 event.getSession(),
 persister.getPropertyVersionability()
 );
@@ -7,7 +7,9 @@
 package org.hibernate.event.internal;

 import org.hibernate.HibernateException;
+import org.hibernate.bytecode.enhance.spi.interceptor.EnhancementAsProxyLazinessInterceptor;
 import org.hibernate.collection.spi.PersistentCollection;
+import org.hibernate.engine.spi.PersistentAttributeInterceptable;
 import org.hibernate.engine.spi.SessionImplementor;
 import org.hibernate.event.spi.EventSource;
 import org.hibernate.type.CollectionType;

@@ -22,11 +24,19 @@ import org.hibernate.type.CollectionType;
 */
 public class DirtyCollectionSearchVisitor extends AbstractVisitor {

+private final EnhancementAsProxyLazinessInterceptor interceptor;
+private final boolean[] propertyVersionability;
 private boolean dirty;
-private boolean[] propertyVersionability;

-public DirtyCollectionSearchVisitor(EventSource session, boolean[] propertyVersionability) {
+public DirtyCollectionSearchVisitor(Object entity, EventSource session, boolean[] propertyVersionability) {
 super( session );
+EnhancementAsProxyLazinessInterceptor interceptor = null;
+if ( entity instanceof PersistentAttributeInterceptable ) {
+if ( ( (PersistentAttributeInterceptable) entity ).$$_hibernate_getInterceptor() instanceof EnhancementAsProxyLazinessInterceptor ) {
+interceptor = (EnhancementAsProxyLazinessInterceptor) ( (PersistentAttributeInterceptable) entity ).$$_hibernate_getInterceptor();
+}
+}
+this.interceptor = interceptor;
 this.propertyVersionability = propertyVersionability;
 }

@@ -45,6 +55,9 @@ public class DirtyCollectionSearchVisitor extends AbstractVisitor {
 // return (ah==null) ? true : searchForDirtyCollections(ah, type);
 }
 else {
+if ( interceptor != null && !interceptor.isAttributeLoaded( type.getName() ) ) {
+return null;
+}
 // if not wrapped yet, its dirty (this can't occur, because
 // we now always call wrap() before getting to here)
 // return ( ! (obj instanceof PersistentCollection) ) ?
@@ -13,7 +13,6 @@ import org.hibernate.bytecode.enhance.spi.interceptor.EnhancementAsProxyLaziness
 import org.hibernate.collection.spi.PersistentCollection;
 import org.hibernate.engine.spi.PersistenceContext;
 import org.hibernate.engine.spi.PersistentAttributeInterceptable;
-import org.hibernate.engine.spi.PersistentAttributeInterceptor;
 import org.hibernate.engine.spi.SessionImplementor;
 import org.hibernate.event.spi.EventSource;
 import org.hibernate.internal.CoreLogging;

@@ -107,11 +106,8 @@ public class WrapVisitor extends ProxyVisitor {
 }
 else {
 if ( entity instanceof PersistentAttributeInterceptable ) {
-final PersistentAttributeInterceptor interceptor = ( (PersistentAttributeInterceptable) entity ).$$_hibernate_getInterceptor();
-if ( interceptor instanceof EnhancementAsProxyLazinessInterceptor ) {
-if ( !( (EnhancementAsProxyLazinessInterceptor) interceptor ).isInitialized() ) {
-return null;
-}
-}
+if ( ( (PersistentAttributeInterceptable) entity ).$$_hibernate_getInterceptor() instanceof EnhancementAsProxyLazinessInterceptor ) {
+return null;
+}
 }
 }
@@ -11,8 +11,12 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedHashSet;
 import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionStage;
 import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
 import java.util.function.Consumer;
+import java.util.function.Function;
 import java.util.function.Supplier;

 import org.hibernate.event.service.spi.DuplicationStrategy;

@@ -33,8 +37,10 @@ import org.jboss.logging.Logger;
 * @author Sanne Grinovero
 */
 class EventListenerGroupImpl<T> implements EventListenerGroup<T> {

 private static final Logger log = Logger.getLogger( EventListenerGroupImpl.class );
 private static final Set<DuplicationStrategy> DEFAULT_DUPLICATION_STRATEGIES = Collections.unmodifiableSet( makeDefaultDuplicationStrategy() );
+private static final CompletableFuture COMPLETED = CompletableFuture.completedFuture( null );

 private final EventType<T> eventType;
 private final CallbackRegistry callbackRegistry;

@@ -114,6 +120,54 @@ class EventListenerGroupImpl<T> implements EventListenerGroup<T> {
 }
 }

+@Override
+public <R, U, RL> CompletionStage<R> fireEventOnEachListener(
+final U event,
+final Function<RL, Function<U, CompletionStage<R>>> fun) {
+CompletionStage<R> ret = COMPLETED;
+final T[] ls = listeners;
+if ( ls != null && ls.length != 0 ) {
+for ( T listener : ls ) {
+//to preserve atomicity of the Session methods
+//call apply() from within the arg of thenCompose()
+ret = ret.thenCompose( v -> fun.apply( (RL) listener ).apply( event ) );
+}
+}
+return ret;
+}
+
+@Override
+public <R, U, RL, X> CompletionStage<R> fireEventOnEachListener(
+U event, X param, Function<RL, BiFunction<U, X, CompletionStage<R>>> fun) {
+CompletionStage<R> ret = COMPLETED;
+final T[] ls = listeners;
+if ( ls != null && ls.length != 0 ) {
+for ( T listener : ls ) {
+//to preserve atomicity of the Session methods
+//call apply() from within the arg of thenCompose()
+ret = ret.thenCompose( v -> fun.apply( (RL) listener ).apply( event, param ) );
+}
+}
+return ret;
+}
+
+@Override
+public <R, U, RL> CompletionStage<R> fireLazyEventOnEachListener(
+final Supplier<U> eventSupplier,
+final Function<RL, Function<U, CompletionStage<R>>> fun) {
+CompletionStage<R> ret = COMPLETED;
+final T[] ls = listeners;
+if ( ls != null && ls.length != 0 ) {
+final U event = eventSupplier.get();
+for ( T listener : ls ) {
+//to preserve atomicity of the Session methods
+//call apply() from within the arg of thenCompose()
+ret = ret.thenCompose( v -> fun.apply( (RL) listener ).apply( event ) );
+}
+}
+return ret;
+}
+
 @Override
 public void addDuplicationStrategy(DuplicationStrategy strategy) {
 if ( duplicationStrategies == DEFAULT_DUPLICATION_STRATEGIES ) {
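The new implementations chain one CompletionStage per listener via thenCompose, so listener N only begins after listener N-1's stage has completed. A standalone sketch of that composition using plain CompletableFuture (the "listeners" here are simple stand-in functions, not Hibernate types):

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.function.Function;

public class SequentialListenerChain {
    public static void main(String[] args) {
        // Each "listener" consumes the event and returns a stage that completes when it is done.
        List<Function<String, CompletionStage<Void>>> listeners = List.of(
                event -> { System.out.println("listener A handled " + event); return CompletableFuture.completedFuture(null); },
                event -> { System.out.println("listener B handled " + event); return CompletableFuture.completedFuture(null); }
        );

        String event = "pre-load";
        CompletionStage<Void> ret = CompletableFuture.completedFuture(null); // analogous to COMPLETED above
        for (Function<String, CompletionStage<Void>> listener : listeners) {
            // thenCompose defers invoking the next listener until the previous stage finishes,
            // which keeps the event processing sequential.
            ret = ret.thenCompose(v -> listener.apply(event));
        }
        ret.toCompletableFuture().join();
    }
}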
@@ -7,7 +7,10 @@
 package org.hibernate.event.service.spi;

 import java.io.Serializable;
+import java.util.concurrent.CompletionStage;
 import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
+import java.util.function.Function;
 import java.util.function.Supplier;

 import org.hibernate.Incubating;

@@ -112,4 +115,53 @@ public interface EventListenerGroup<T> extends Serializable {
 @Incubating
 <U,X> void fireEventOnEachListener(final U event, X param, final EventActionWithParameter<T,U,X> actionOnEvent);

+/**
+ * Similar to {@link #fireEventOnEachListener(Object, Function)}, but Reactive friendly: it chains
+ * processing of the same event on each Reactive Listener, and returns a {@link CompletionStage} of type R.
+ * The various generic types allow using this for each concrete event type and flexible return types.
+ * <p>Used by Hibernate Reactive</p>
+ * @param event The event being fired
+ * @param fun The function combining each event listener with the event
+ * @param <R> the return type of the returned CompletionStage
+ * @param <U> the type of the event being fired on each listener
+ * @param <RL> the type of ReactiveListener: each listener of type T will be casted to it.
+ * @return the composite completion stage of invoking fun(event) on each listener.
+ */
+@Incubating
+<R, U, RL> CompletionStage<R> fireEventOnEachListener(final U event, final Function<RL, Function<U, CompletionStage<R>>> fun);
+
+/**
+ * Similar to {@link #fireEventOnEachListener(Object, Object, Function)}, but Reactive friendly: it chains
+ * processing of the same event on each Reactive Listener, and returns a {@link CompletionStage} of type R.
+ * The various generic types allow using this for each concrete event type and flexible return types.
+ * <p>Used by Hibernate Reactive</p>
+ * @param event The event being fired
+ * @param fun The function combining each event listener with the event
+ * @param <R> the return type of the returned CompletionStage
+ * @param <U> the type of the event being fired on each listener
+ * @param <RL> the type of ReactiveListener: each listener of type T will be casted to it.
+ * @param <X> an additional parameter to be passed to the function fun
+ * @return the composite completion stage of invoking fun(event) on each listener.
+ */
+@Incubating
+public <R, U, RL, X> CompletionStage<R> fireEventOnEachListener(U event, X param, Function<RL, BiFunction<U, X, CompletionStage<R>>> fun);
+
+/**
+ * Similar to {@link #fireLazyEventOnEachListener(Supplier, BiConsumer)}, but Reactive friendly: it chains
+ * processing of the same event on each Reactive Listener, and returns a {@link CompletionStage} of type R.
+ * The various generic types allow using this for each concrete event type and flexible return types.
+ * <p>This variant expects a Supplier of the event, rather than the event directly; this is useful for the
+ * event types which are commonly configured with no listeners at all, so to allow skipping creating the
+ * event; use only for event types which are known to be expensive while the listeners are commonly empty.</p>
+ * <p>Used by Hibernate Reactive</p>
+ * @param eventSupplier A supplier able to produce the actual event
+ * @param fun The function combining each event listener with the event
+ * @param <R> the return type of the returned CompletionStage
+ * @param <U> the type of the event being fired on each listener
+ * @param <RL> the type of ReactiveListener: each listener of type T will be casted to it.
+ * @return the composite completion stage of invoking fun(event) on each listener.
+ */
+@Incubating
+<R, U, RL> CompletionStage<R> fireLazyEventOnEachListener(final Supplier<U> eventSupplier, final Function<RL, Function<U, CompletionStage<R>>> fun);
+
 }
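As the Javadoc above explains, the lazy variant takes a Supplier so the event object is only built when at least one listener is registered. A small isolated sketch of that guard, with simplified stand-in types rather than the real Hibernate generics:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.function.Function;
import java.util.function.Supplier;

public class LazyEventFiring {
    static <U> CompletionStage<Void> fireLazily(U[] listeners, Supplier<String> eventSupplier,
            Function<U, Function<String, CompletionStage<Void>>> fun) {
        CompletionStage<Void> ret = CompletableFuture.completedFuture(null);
        if (listeners != null && listeners.length != 0) {
            // The potentially expensive event is only created on this branch.
            String event = eventSupplier.get();
            for (U listener : listeners) {
                ret = ret.thenCompose(v -> fun.apply(listener).apply(event));
            }
        }
        return ret;
    }

    public static void main(String[] args) {
        // No listeners registered: the supplier below is never invoked.
        fireLazily(new Object[0],
                () -> { System.out.println("building event"); return "flush-event"; },
                listener -> event -> CompletableFuture.completedFuture(null))
            .toCompletableFuture().join();
        System.out.println("done without constructing the event");
    }
}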
@@ -19,7 +19,7 @@ public enum GraphSemantic {
 * subsequent select). Attributes that are not specified are treated as
 * FetchType.LAZY invariably.
 */
-FETCH( "javax.persistence.fetchgraph" ),
+FETCH( "javax.persistence.fetchgraph", "jakarta.persistence.fetchgraph" ),

 /**
 * Indicates a "load graph" EntityGraph. Attributes explicitly specified

@@ -28,26 +28,32 @@ public enum GraphSemantic {
 * FetchType.LAZY or FetchType.EAGER depending on the attribute's definition
 * in metadata.
 */
-LOAD( "javax.persistence.loadgraph" );
+LOAD( "javax.persistence.loadgraph", "jakarta.persistence.loadgraph" );

 private final String jpaHintName;
+private final String jakartaJpaHintName;

-GraphSemantic(String jpaHintName) {
+GraphSemantic(String jpaHintName, String jakartaJpaHintName) {
 this.jpaHintName = jpaHintName;
+this.jakartaJpaHintName = jakartaJpaHintName;
 }

 public String getJpaHintName() {
 return jpaHintName;
 }

+public String getJakartaJpaHintName() {
+return jakartaJpaHintName;
+}
+
 public static GraphSemantic fromJpaHintName(String hintName) {
 assert hintName != null;

-if ( FETCH.getJpaHintName().equals( hintName ) ) {
+if ( FETCH.getJpaHintName().equals( hintName ) || FETCH.getJakartaJpaHintName().equals( hintName ) ) {
 return FETCH;
 }

-if ( LOAD.getJpaHintName().equalsIgnoreCase( hintName ) ) {
+if ( LOAD.getJpaHintName().equalsIgnoreCase( hintName ) || LOAD.getJakartaJpaHintName().equalsIgnoreCase( hintName ) ) {
 return LOAD;
 }
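GraphSemantic now resolves both hint names. A standalone sketch of the same double check (the four hint strings are the ones carried by the enum above; the Semantic enum, the map, and the EntityManager.find usage mentioned in the comment are illustrative):

import java.util.Map;

public class GraphHintResolver {
    enum Semantic { FETCH, LOAD }

    // Accepts either the legacy javax name or the jakarta name, mirroring fromJpaHintName above.
    static Semantic fromHintName(String hintName) {
        if ("javax.persistence.fetchgraph".equals(hintName) || "jakarta.persistence.fetchgraph".equals(hintName)) {
            return Semantic.FETCH;
        }
        if ("javax.persistence.loadgraph".equals(hintName) || "jakarta.persistence.loadgraph".equals(hintName)) {
            return Semantic.LOAD;
        }
        throw new IllegalArgumentException("Unknown entity graph hint: " + hintName);
    }

    public static void main(String[] args) {
        // A hints map like this could be passed to EntityManager.find(entityClass, id, hints);
        // the value would be an EntityGraph instance in real code.
        Map<String, Object> hints = Map.of("jakarta.persistence.fetchgraph", "someEntityGraph");
        hints.keySet().forEach(name -> System.out.println(name + " -> " + fromHintName(name)));
    }
}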
@@ -68,6 +68,10 @@ import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
 import org.hibernate.service.spi.ServiceRegistryImplementor;
 import org.hibernate.type.descriptor.jdbc.JdbcTypeDescriptor;

+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_SCOPE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_TIMEOUT;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_SHARED_CACHE_STORE_MODE;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_SCOPE;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_TIMEOUT;
 import static org.hibernate.cfg.AvailableSettings.JPA_SHARED_CACHE_RETRIEVE_MODE;

@@ -274,20 +278,29 @@ public final class FastSessionServices {
 HashMap<String,Object> p = new HashMap<>();

 //Static defaults:
-p.put( AvailableSettings.FLUSH_MODE, FlushMode.AUTO.name() );
-p.put( JPA_LOCK_SCOPE, PessimisticLockScope.EXTENDED.name() );
-p.put( JPA_LOCK_TIMEOUT, LockOptions.WAIT_FOREVER );
-p.put( JPA_SHARED_CACHE_RETRIEVE_MODE, CacheModeHelper.DEFAULT_RETRIEVE_MODE );
-p.put( JPA_SHARED_CACHE_STORE_MODE, CacheModeHelper.DEFAULT_STORE_MODE );
+p.putIfAbsent( AvailableSettings.FLUSH_MODE, FlushMode.AUTO.name() );
+p.putIfAbsent( JPA_LOCK_SCOPE, PessimisticLockScope.EXTENDED.name() );
+p.putIfAbsent( JAKARTA_JPA_LOCK_SCOPE, PessimisticLockScope.EXTENDED.name() );
+p.putIfAbsent( JPA_LOCK_TIMEOUT, LockOptions.WAIT_FOREVER );
+p.putIfAbsent( JAKARTA_JPA_LOCK_TIMEOUT, LockOptions.WAIT_FOREVER );
+p.putIfAbsent( JPA_SHARED_CACHE_RETRIEVE_MODE, CacheModeHelper.DEFAULT_RETRIEVE_MODE );
+p.putIfAbsent( JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE, CacheModeHelper.DEFAULT_RETRIEVE_MODE );
+p.putIfAbsent( JPA_SHARED_CACHE_STORE_MODE, CacheModeHelper.DEFAULT_STORE_MODE );
+p.putIfAbsent( JAKARTA_JPA_SHARED_CACHE_STORE_MODE, CacheModeHelper.DEFAULT_STORE_MODE );

 //Defaults defined by SessionFactory configuration:
 final String[] ENTITY_MANAGER_SPECIFIC_PROPERTIES = {
 JPA_LOCK_SCOPE,
+JAKARTA_JPA_LOCK_SCOPE,
 JPA_LOCK_TIMEOUT,
+JAKARTA_JPA_LOCK_TIMEOUT,
 AvailableSettings.FLUSH_MODE,
 JPA_SHARED_CACHE_RETRIEVE_MODE,
+JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE,
 JPA_SHARED_CACHE_STORE_MODE,
-QueryHints.SPEC_HINT_TIMEOUT
+JAKARTA_JPA_SHARED_CACHE_STORE_MODE,
+QueryHints.SPEC_HINT_TIMEOUT,
+QueryHints.JAKARTA_SPEC_HINT_TIMEOUT
 };
 final Map<String, Object> properties = sf.getProperties();
 for ( String key : ENTITY_MANAGER_SPECIFIC_PROPERTIES ) {

@@ -327,11 +340,19 @@ public final class FastSessionServices {
 }

 private static CacheRetrieveMode determineCacheRetrieveMode(Map<String, Object> settings) {
-return ( CacheRetrieveMode ) settings.get( JPA_SHARED_CACHE_RETRIEVE_MODE );
+final CacheRetrieveMode cacheRetrieveMode = (CacheRetrieveMode) settings.get( JPA_SHARED_CACHE_RETRIEVE_MODE );
+if ( cacheRetrieveMode == null ) {
+return (CacheRetrieveMode) settings.get( JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE );
+}
+return cacheRetrieveMode;
 }

 private static CacheStoreMode determineCacheStoreMode(Map<String, Object> settings) {
-return ( CacheStoreMode ) settings.get( JPA_SHARED_CACHE_STORE_MODE );
+final CacheStoreMode cacheStoreMode = (CacheStoreMode) settings.get( JPA_SHARED_CACHE_STORE_MODE );
+if ( cacheStoreMode == null ) {
+return ( CacheStoreMode ) settings.get( JAKARTA_JPA_SHARED_CACHE_STORE_MODE );
+}
+return cacheStoreMode;
 }

 public ConnectionObserverStatsBridge getDefaultJdbcObserver() {
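Switching the defaults block from put to putIfAbsent means an explicitly configured value is no longer overwritten by the static default. A map-only sketch of the difference; the javax key string appears earlier in this diff, while the jakarta form and the -1 sentinel are my assumptions standing in for the real constants:

import java.util.HashMap;
import java.util.Map;

public class PutIfAbsentDefaults {
    public static void main(String[] args) {
        Map<String, Object> p = new HashMap<>();
        // Value supplied by the user's configuration before defaults are applied.
        p.put("javax.persistence.lock.timeout", 5000);

        // putIfAbsent keeps the user's 5000 instead of clobbering it with the default.
        p.putIfAbsent("javax.persistence.lock.timeout", -1);   // -1 standing in for LockOptions.WAIT_FOREVER
        // For a key nobody configured, the default is installed as before.
        p.putIfAbsent("jakarta.persistence.lock.timeout", -1); // assumed jakarta form of the same key

        System.out.println(p);
    }
}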
@ -164,7 +164,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
|
||||||
private final String name;
|
private final String name;
|
||||||
private final String uuid;
|
private final String uuid;
|
||||||
|
|
||||||
private transient volatile boolean isClosed;
|
private transient volatile Status status = Status.OPEN;
|
||||||
|
|
||||||
private final transient SessionFactoryObserverChain observer = new SessionFactoryObserverChain();
|
private final transient SessionFactoryObserverChain observer = new SessionFactoryObserverChain();
|
||||||
|
|
||||||
|
@ -235,12 +235,17 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
|
||||||
|
|
||||||
this.properties = new HashMap<>();
|
this.properties = new HashMap<>();
|
||||||
this.properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
|
this.properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
|
||||||
if ( !properties.containsKey( AvailableSettings.JPA_VALIDATION_FACTORY ) ) {
|
if ( !properties.containsKey( AvailableSettings.JPA_VALIDATION_FACTORY )
|
||||||
|
&& !properties.containsKey( AvailableSettings.JAKARTA_JPA_VALIDATION_FACTORY ) ) {
|
||||||
if ( getSessionFactoryOptions().getValidatorFactoryReference() != null ) {
|
if ( getSessionFactoryOptions().getValidatorFactoryReference() != null ) {
|
||||||
properties.put(
|
properties.put(
|
||||||
AvailableSettings.JPA_VALIDATION_FACTORY,
|
AvailableSettings.JPA_VALIDATION_FACTORY,
|
||||||
getSessionFactoryOptions().getValidatorFactoryReference()
|
getSessionFactoryOptions().getValidatorFactoryReference()
|
||||||
);
|
);
|
||||||
|
properties.put(
|
||||||
|
AvailableSettings.JAKARTA_JPA_VALIDATION_FACTORY,
|
||||||
|
getSessionFactoryOptions().getValidatorFactoryReference()
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -588,7 +593,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
|
||||||
}
|
}
|
||||||
|
|
||||||
protected void validateNotClosed() {
|
protected void validateNotClosed() {
|
||||||
if ( isClosed ) {
|
if ( status == Status.CLOSED ) {
|
||||||
throw new IllegalStateException( "EntityManagerFactory is closed" );
|
throw new IllegalStateException( "EntityManagerFactory is closed" );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -663,7 +668,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
|
||||||
}
|
}
|
||||||
|
|
||||||
private <K,V> Session buildEntityManager(final SynchronizationType synchronizationType, final Map<K,V> map) {
|
private <K,V> Session buildEntityManager(final SynchronizationType synchronizationType, final Map<K,V> map) {
|
||||||
assert !isClosed;
|
assert status != Status.CLOSED;
|
||||||
|
|
||||||
SessionBuilderImplementor builder = withOptions();
|
SessionBuilderImplementor builder = withOptions();
|
||||||
if ( synchronizationType == SynchronizationType.SYNCHRONIZED ) {
|
if ( synchronizationType == SynchronizationType.SYNCHRONIZED ) {
|
||||||
|
@ -732,7 +737,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean isOpen() {
|
public boolean isOpen() {
|
||||||
return !isClosed;
|
return status != Status.CLOSED;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -809,9 +814,10 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
|
||||||
* collector release the memory.
|
* collector release the memory.
|
||||||
* @throws HibernateException
|
* @throws HibernateException
|
||||||
*/
|
*/
|
||||||
|
@Override
|
||||||
public void close() throws HibernateException {
|
public void close() throws HibernateException {
|
||||||
synchronized (this) {
|
synchronized (this) {
|
||||||
if ( isClosed ) {
|
if ( status != Status.OPEN ) {
|
||||||
if ( getSessionFactoryOptions().getJpaCompliance().isJpaClosedComplianceEnabled() ) {
|
if ( getSessionFactoryOptions().getJpaCompliance().isJpaClosedComplianceEnabled() ) {
|
||||||
throw new IllegalStateException( "EntityManagerFactory is already closed" );
|
throw new IllegalStateException( "EntityManagerFactory is already closed" );
|
||||||
}
|
}
|
||||||
|
@ -820,46 +826,54 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
isClosed = true;
|
status = Status.CLOSING;
|
||||||
}
|
}
|
||||||
|
|
||||||
LOG.closing();
|
try {
|
||||||
observer.sessionFactoryClosing( this );
|
LOG.closing();
|
||||||
|
observer.sessionFactoryClosing( this );
|
||||||
|
|
||||||
// NOTE : the null checks below handle cases where close is called from
|
// NOTE : the null checks below handle cases where close is called from
|
||||||
// a failed attempt to create the SessionFactory
|
// a failed attempt to create the SessionFactory
|
||||||
|
|
||||||
if ( cacheAccess != null ) {
|
if ( cacheAccess != null ) {
|
||||||
cacheAccess.close();
|
cacheAccess.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
 if ( runtimeMetamodels != null && runtimeMetamodels.getMappingMetamodel() != null ) {
     final JdbcConnectionAccess jdbcConnectionAccess = jdbcServices.getBootstrapJdbcConnectionAccess();
     runtimeMetamodels.getMappingMetamodel().visitEntityDescriptors(
             entityPersister -> {
                 if ( entityPersister.getSqmMultiTableMutationStrategy() != null ) {
-                    entityPersister.getSqmMultiTableMutationStrategy().release( this, jdbcConnectionAccess );
+                    entityPersister.getSqmMultiTableMutationStrategy().release(
+                            this,
+                            jdbcConnectionAccess
+                    );
+                }
             }
-}
+    );
+    ( (MappingMetamodelImpl) runtimeMetamodels.getMappingMetamodel() ).close();
+}
+
+if ( queryEngine != null ) {
+    queryEngine.close();
+}
+
+if ( delayedDropAction != null ) {
+    delayedDropAction.perform( serviceRegistry );
+}
+
+SessionFactoryRegistry.INSTANCE.removeSessionFactory(
+        getUuid(),
+        name,
+        settings.isSessionFactoryNameAlsoJndiName(),
+        serviceRegistry.getService( JndiService.class )
 );
-( (MappingMetamodelImpl) runtimeMetamodels.getMappingMetamodel() ).close();
 }
-if ( queryEngine != null ) {
-    queryEngine.close();
+finally {
+    status = Status.CLOSED;
 }
 
-if ( delayedDropAction != null ) {
-    delayedDropAction.perform( serviceRegistry );
-}
-
-SessionFactoryRegistry.INSTANCE.removeSessionFactory(
-        getUuid(),
-        name,
-        settings.isSessionFactoryNameAlsoJndiName(),
-        serviceRegistry.getService( JndiService.class )
-);
-
 observer.sessionFactoryClosed( this );
 serviceRegistry.destroy();
 }
@@ -956,8 +970,9 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
         getMetamodel().addNamedEntityGraph( graphName, (RootGraphImplementor<T>) entityGraph );
     }
 
+    @Override
     public boolean isClosed() {
-        return isClosed;
+        return status == Status.CLOSED;
     }
 
     private transient StatisticsImplementor statistics;
@@ -1624,6 +1639,8 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
     private void maskOutSensitiveInformation(Map<String, Object> props) {
         maskOutIfSet( props, AvailableSettings.JPA_JDBC_USER );
         maskOutIfSet( props, AvailableSettings.JPA_JDBC_PASSWORD );
+        maskOutIfSet( props, AvailableSettings.JAKARTA_JPA_JDBC_USER );
+        maskOutIfSet( props, AvailableSettings.JAKARTA_JPA_JDBC_PASSWORD );
         maskOutIfSet( props, AvailableSettings.USER );
         maskOutIfSet( props, AvailableSettings.PASS );
     }
@@ -1658,4 +1675,9 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
         return this.fastSessionServices;
     }
 
+    private enum Status {
+        OPEN,
+        CLOSING,
+        CLOSED;
+    }
 }
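For illustration only: the SessionFactoryImpl change above replaces a boolean closed flag with a small lifecycle enum whose CLOSED value is set in a finally block. A minimal, self-contained sketch of that pattern (the class and field names here are illustrative, not Hibernate's actual members):

public class LifecycleStatusExample {

    // Tri-state lifecycle flag, mirroring the OPEN/CLOSING/CLOSED enum added above
    private enum Status { OPEN, CLOSING, CLOSED }

    private volatile Status status = Status.OPEN;

    public boolean isClosed() {
        // equivalent to the old boolean check, but distinguishes CLOSING from CLOSED
        return status == Status.CLOSED;
    }

    public void close() {
        if ( status != Status.OPEN ) {
            return; // already closing or closed
        }
        status = Status.CLOSING;
        try {
            // release resources here
        }
        finally {
            // mirrors the "finally { status = Status.CLOSED; }" shown in the diff
            status = Status.CLOSED;
        }
    }
}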
@@ -139,6 +139,10 @@ import org.hibernate.stat.SessionStatistics;
 import org.hibernate.stat.internal.SessionStatisticsImpl;
 import org.hibernate.stat.spi.StatisticsImplementor;
 
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_SCOPE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_TIMEOUT;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_SHARED_CACHE_STORE_MODE;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_SCOPE;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_TIMEOUT;
 import static org.hibernate.cfg.AvailableSettings.JPA_SHARED_CACHE_RETRIEVE_MODE;
@@ -269,8 +273,28 @@ public class SessionImpl
         if ( ( queryTimeout = getSessionProperty( QueryHints.SPEC_HINT_TIMEOUT ) ) != null ) {
             query.setHint( QueryHints.SPEC_HINT_TIMEOUT, queryTimeout );
         }
+        final Object jakartaQueryTimeout;
+        if ( ( jakartaQueryTimeout = getSessionProperty( QueryHints.JAKARTA_SPEC_HINT_TIMEOUT ) ) != null ) {
+            query.setHint( QueryHints.JAKARTA_SPEC_HINT_TIMEOUT, jakartaQueryTimeout );
+        }
         final Object lockTimeout;
-        if ( ( lockTimeout = getSessionProperty( JPA_LOCK_TIMEOUT ) ) != null ) {
+        final Object jpaLockTimeout = getSessionProperty( JPA_LOCK_TIMEOUT );
+        if ( jpaLockTimeout == null ) {
+            lockTimeout = getSessionProperty( JAKARTA_JPA_LOCK_TIMEOUT );
+        }
+        else if ( Integer.valueOf( LockOptions.WAIT_FOREVER ).equals( jpaLockTimeout ) ) {
+            final Object jakartaLockTimeout = getSessionProperty( JAKARTA_JPA_LOCK_TIMEOUT );
+            if ( jakartaLockTimeout == null ) {
+                lockTimeout = jpaLockTimeout;
+            }
+            else {
+                lockTimeout = jakartaLockTimeout;
+            }
+        }
+        else {
+            lockTimeout = jpaLockTimeout;
+        }
+        if ( lockTimeout != null ) {
             query.setHint( JPA_LOCK_TIMEOUT, lockTimeout );
         }
     }
@@ -2323,11 +2347,19 @@ public class SessionImpl
     }
 
     private static CacheRetrieveMode determineCacheRetrieveMode(Map<String, Object> settings) {
-        return ( CacheRetrieveMode ) settings.get( JPA_SHARED_CACHE_RETRIEVE_MODE );
+        final CacheRetrieveMode cacheRetrieveMode = (CacheRetrieveMode) settings.get( JPA_SHARED_CACHE_RETRIEVE_MODE );
+        if ( cacheRetrieveMode == null ) {
+            return (CacheRetrieveMode) settings.get( JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE );
+        }
+        return cacheRetrieveMode;
     }
 
     private static CacheStoreMode determineCacheStoreMode(Map<String, Object> settings) {
-        return ( CacheStoreMode ) settings.get( JPA_SHARED_CACHE_STORE_MODE );
+        final CacheStoreMode cacheStoreMode = (CacheStoreMode) settings.get( JPA_SHARED_CACHE_STORE_MODE );
+        if ( cacheStoreMode == null ) {
+            return ( CacheStoreMode ) settings.get( JAKARTA_JPA_SHARED_CACHE_STORE_MODE );
+        }
+        return cacheStoreMode;
     }
 
     private void checkTransactionNeededForUpdateOperation() {
@@ -2485,10 +2517,14 @@ public class SessionImpl
         if ( AvailableSettings.FLUSH_MODE.equals( propertyName ) ) {
             setHibernateFlushMode( ConfigurationHelper.getFlushMode( value, FlushMode.AUTO ) );
         }
-        else if ( JPA_LOCK_SCOPE.equals( propertyName ) || JPA_LOCK_TIMEOUT.equals( propertyName ) ) {
+        else if ( JPA_LOCK_SCOPE.equals( propertyName ) || JPA_LOCK_TIMEOUT.equals( propertyName )
+                || JAKARTA_JPA_LOCK_SCOPE.equals( propertyName ) || JAKARTA_JPA_LOCK_TIMEOUT.equals( propertyName ) ) {
             LockOptionsHelper.applyPropertiesToLockOptions( properties, this::getLockOptionsForWrite );
         }
-        else if ( JPA_SHARED_CACHE_RETRIEVE_MODE.equals( propertyName ) || JPA_SHARED_CACHE_STORE_MODE.equals( propertyName ) ) {
+        else if ( JPA_SHARED_CACHE_RETRIEVE_MODE.equals( propertyName )
+                || JPA_SHARED_CACHE_STORE_MODE.equals( propertyName )
+                || JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE.equals( propertyName )
+                || JAKARTA_JPA_SHARED_CACHE_STORE_MODE.equals( propertyName ) ) {
             setCacheMode(
                     CacheModeHelper.interpretCacheMode(
                             determineCacheStoreMode( properties ),
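For illustration, the SessionImpl lookups above consistently read the legacy javax.persistence key first and fall back to the jakarta.persistence variant. A hypothetical stand-alone helper (not part of Hibernate) showing that same fallback rule, using the standard JPA lock-timeout property names:

import java.util.HashMap;
import java.util.Map;

public final class SettingFallbackExample {

    private SettingFallbackExample() {
    }

    // Returns the legacy value when present, otherwise the Jakarta value (may be null).
    public static Object readSetting(Map<String, Object> settings, String legacyKey, String jakartaKey) {
        final Object legacyValue = settings.get( legacyKey );
        if ( legacyValue != null ) {
            return legacyValue;
        }
        return settings.get( jakartaKey );
    }

    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        props.put( "jakarta.persistence.lock.timeout", 2000 );
        // prints 2000 because only the Jakarta key is set
        System.out.println( readSetting( props, "javax.persistence.lock.timeout", "jakarta.persistence.lock.timeout" ) );
    }
}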
@@ -23,6 +23,7 @@ import static org.hibernate.annotations.QueryHints.NATIVE_SPACES;
 import static org.hibernate.annotations.QueryHints.PASS_DISTINCT_THROUGH;
 import static org.hibernate.annotations.QueryHints.READ_ONLY;
 import static org.hibernate.annotations.QueryHints.TIMEOUT_HIBERNATE;
+import static org.hibernate.annotations.QueryHints.TIMEOUT_JAKARTA_JPA;
 import static org.hibernate.annotations.QueryHints.TIMEOUT_JPA;
 
 /**
@@ -42,6 +43,11 @@ public class QueryHints {
      */
     public static final String SPEC_HINT_TIMEOUT = TIMEOUT_JPA;
 
+    /**
+     * The hint key for specifying a query timeout per JPA, which defines the timeout in milliseconds
+     */
+    public static final String JAKARTA_SPEC_HINT_TIMEOUT = TIMEOUT_JAKARTA_JPA;
+
     /**
      * The hint key for specifying a comment which is to be embedded into the SQL sent to the database.
      */
@@ -100,6 +106,22 @@ public class QueryHints {
      */
     public static final String HINT_LOADGRAPH = GraphSemantic.LOAD.getJpaHintName();
 
+    /**
+     * Hint providing a "fetchgraph" EntityGraph. Attributes explicitly specified as AttributeNodes are treated as
+     * FetchType.EAGER (via join fetch or subsequent select).
+     *
+     * Note: Currently, attributes that are not specified are treated as FetchType.LAZY or FetchType.EAGER depending
+     * on the attribute's definition in metadata, rather than forcing FetchType.LAZY.
+     */
+    public static final String JAKARTA_HINT_FETCHGRAPH = GraphSemantic.FETCH.getJakartaJpaHintName();
+
+    /**
+     * Hint providing a "loadgraph" EntityGraph. Attributes explicitly specified as AttributeNodes are treated as
+     * FetchType.EAGER (via join fetch or subsequent select). Attributes that are not specified are treated as
+     * FetchType.LAZY or FetchType.EAGER depending on the attribute's definition in metadata
+     */
+    public static final String JAKARTA_HINT_LOADGRAPH = GraphSemantic.LOAD.getJakartaJpaHintName();
+
     public static final String HINT_FOLLOW_ON_LOCKING = FOLLOW_ON_LOCKING;
 
     public static final String HINT_PASS_DISTINCT_THROUGH = PASS_DISTINCT_THROUGH;
@@ -113,6 +135,7 @@ public class QueryHints {
         HashSet<String> hints = new HashSet<>();
         hints.add( HINT_TIMEOUT );
         hints.add( SPEC_HINT_TIMEOUT );
+        hints.add( JAKARTA_SPEC_HINT_TIMEOUT );
         hints.add( HINT_COMMENT );
         hints.add( HINT_FETCH_SIZE );
         hints.add( HINT_CACHE_REGION );
@@ -123,6 +146,8 @@ public class QueryHints {
         hints.add( HINT_NATIVE_LOCKMODE );
         hints.add( HINT_FETCHGRAPH );
         hints.add( HINT_LOADGRAPH );
+        hints.add( JAKARTA_HINT_FETCHGRAPH );
+        hints.add( JAKARTA_HINT_LOADGRAPH );
         hints.add( HINT_NATIVE_SPACES );
         return java.util.Collections.unmodifiableSet( hints );
     }
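For illustration, with the constants added above a caller can register the Jakarta flavour of a hint alongside the legacy one. A hedged usage sketch referencing only the constants shown in the diff; the entity name and JPQL string are made up:

import javax.persistence.EntityManager;
import javax.persistence.Query;

import org.hibernate.jpa.QueryHints;

public class QueryHintExample {

    // Applies both the legacy and the Jakarta query-timeout hints (values are in milliseconds).
    public static Query withTimeout(EntityManager em, int timeoutMs) {
        Query query = em.createQuery( "select p from Person p" ); // hypothetical entity
        query.setHint( QueryHints.SPEC_HINT_TIMEOUT, timeoutMs );
        query.setHint( QueryHints.JAKARTA_SPEC_HINT_TIMEOUT, timeoutMs );
        return query;
    }
}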
@@ -92,6 +92,15 @@ import static org.hibernate.cfg.AvailableSettings.DRIVER;
 import static org.hibernate.cfg.AvailableSettings.JACC_CONTEXT_ID;
 import static org.hibernate.cfg.AvailableSettings.JACC_ENABLED;
 import static org.hibernate.cfg.AvailableSettings.JACC_PREFIX;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_JDBC_DRIVER;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_JDBC_PASSWORD;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_JDBC_URL;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_JDBC_USER;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_JTA_DATASOURCE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_NON_JTA_DATASOURCE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_SHARED_CACHE_MODE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_TRANSACTION_TYPE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_VALIDATION_MODE;
 import static org.hibernate.cfg.AvailableSettings.JPA_JDBC_DRIVER;
 import static org.hibernate.cfg.AvailableSettings.JPA_JDBC_PASSWORD;
 import static org.hibernate.cfg.AvailableSettings.JPA_JDBC_URL;
@@ -260,7 +269,13 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
                 metamodelBuilder.getBootstrapContext()
         );
 
-        withValidatorFactory( configurationValues.get( org.hibernate.cfg.AvailableSettings.JPA_VALIDATION_FACTORY ) );
+        final Object validatorFactory = configurationValues.get( org.hibernate.cfg.AvailableSettings.JPA_VALIDATION_FACTORY );
+        if ( validatorFactory == null ) {
+            withValidatorFactory( configurationValues.get( org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_VALIDATION_FACTORY ) );
+        }
+        else {
+            withValidatorFactory( validatorFactory );
+        }
 
         // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
         // push back class transformation to the environment; for the time being this only has any effect in EE
@@ -574,20 +589,30 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
 
         // normalize ValidationMode
         final Object intgValidationMode = integrationSettingsCopy.remove( JPA_VALIDATION_MODE );
+        final Object jakartaIntgValidationMode = integrationSettingsCopy.remove( JAKARTA_JPA_VALIDATION_MODE );
         if ( intgValidationMode != null ) {
             mergedSettings.configurationValues.put( JPA_VALIDATION_MODE, intgValidationMode );
         }
+        else if ( jakartaIntgValidationMode != null ) {
+            mergedSettings.configurationValues.put( JAKARTA_JPA_VALIDATION_MODE, jakartaIntgValidationMode );
+        }
         else if ( persistenceUnit.getValidationMode() != null ) {
             mergedSettings.configurationValues.put( JPA_VALIDATION_MODE, persistenceUnit.getValidationMode() );
+            mergedSettings.configurationValues.put( JAKARTA_JPA_VALIDATION_MODE, persistenceUnit.getValidationMode() );
         }
 
         // normalize SharedCacheMode
         final Object intgCacheMode = integrationSettingsCopy.remove( JPA_SHARED_CACHE_MODE );
+        final Object jakartaIntgCacheMode = integrationSettingsCopy.remove( JAKARTA_JPA_SHARED_CACHE_MODE );
         if ( intgCacheMode != null ) {
             mergedSettings.configurationValues.put( JPA_SHARED_CACHE_MODE, intgCacheMode );
         }
+        else if ( jakartaIntgCacheMode != null ) {
+            mergedSettings.configurationValues.put( JAKARTA_JPA_SHARED_CACHE_MODE, jakartaIntgCacheMode );
+        }
         else if ( persistenceUnit.getSharedCacheMode() != null ) {
             mergedSettings.configurationValues.put( JPA_SHARED_CACHE_MODE, persistenceUnit.getSharedCacheMode() );
+            mergedSettings.configurationValues.put( JAKARTA_JPA_SHARED_CACHE_MODE, persistenceUnit.getSharedCacheMode() );
         }
 
         // Apply all "integration overrides" as the last step. By specification,
@@ -620,16 +645,20 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
         final Object effectiveUser = NullnessHelper.coalesceSuppliedValues(
                 () -> integrationSettingsCopy.remove( USER ),
                 () -> integrationSettingsCopy.remove( JPA_JDBC_USER ),
+                () -> integrationSettingsCopy.remove( JAKARTA_JPA_JDBC_USER ),
                 () -> extractPuProperty( persistenceUnit, USER ),
-                () -> extractPuProperty( persistenceUnit, JPA_JDBC_USER )
+                () -> extractPuProperty( persistenceUnit, JPA_JDBC_USER ),
+                () -> extractPuProperty( persistenceUnit, JAKARTA_JPA_JDBC_USER )
         );
 
         //noinspection unchecked
         final Object effectivePass = NullnessHelper.coalesceSuppliedValues(
                 () -> integrationSettingsCopy.remove( PASS ),
                 () -> integrationSettingsCopy.remove( JPA_JDBC_PASSWORD ),
+                () -> integrationSettingsCopy.remove( JAKARTA_JPA_JDBC_PASSWORD ),
                 () -> extractPuProperty( persistenceUnit, PASS ),
-                () -> extractPuProperty( persistenceUnit, JPA_JDBC_PASSWORD )
+                () -> extractPuProperty( persistenceUnit, JPA_JDBC_PASSWORD ),
+                () -> extractPuProperty( persistenceUnit, JAKARTA_JPA_JDBC_PASSWORD )
         );
 
         if ( effectiveUser != null || effectivePass != null ) {
@@ -647,11 +676,13 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
         if ( effectiveUser != null ) {
             mergedSettings.configurationValues.put( USER, effectiveUser );
             mergedSettings.configurationValues.put( JPA_JDBC_USER, effectiveUser );
+            mergedSettings.configurationValues.put( JAKARTA_JPA_JDBC_USER, effectiveUser );
         }
 
         if ( effectivePass != null ) {
             mergedSettings.configurationValues.put( PASS, effectivePass );
             mergedSettings.configurationValues.put( JPA_JDBC_PASSWORD, effectivePass );
+            mergedSettings.configurationValues.put( JAKARTA_JPA_JDBC_PASSWORD, effectivePass );
         }
     }
@@ -664,7 +695,10 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
             MergedSettings mergedSettings) {
         PersistenceUnitTransactionType txnType = null;
 
-        final Object intgTxnType = integrationSettingsCopy.remove( JPA_TRANSACTION_TYPE );
+        Object intgTxnType = integrationSettingsCopy.remove( JPA_TRANSACTION_TYPE );
+        if ( intgTxnType == null ) {
+            intgTxnType = integrationSettingsCopy.remove( JAKARTA_JPA_TRANSACTION_TYPE );
+        }
 
         if ( intgTxnType != null ) {
             txnType = PersistenceUnitTransactionTypeHelper.interpretTransactionType( intgTxnType );
@@ -673,7 +707,10 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
             txnType = persistenceUnit.getTransactionType();
         }
         else {
-            final Object puPropTxnType = mergedSettings.configurationValues.get( JPA_TRANSACTION_TYPE );
+            Object puPropTxnType = mergedSettings.configurationValues.get( JPA_TRANSACTION_TYPE );
+            if ( puPropTxnType == null ) {
+                puPropTxnType = mergedSettings.configurationValues.get( JAKARTA_JPA_TRANSACTION_TYPE );
+            }
             if ( puPropTxnType != null ) {
                 txnType = PersistenceUnitTransactionTypeHelper.interpretTransactionType( puPropTxnType );
             }
@@ -764,6 +801,21 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
             }
         }
 
+        if ( integrationSettingsCopy.containsKey( JAKARTA_JPA_JTA_DATASOURCE ) ) {
+            final Object dataSourceRef = integrationSettingsCopy.remove( JAKARTA_JPA_JTA_DATASOURCE );
+            if ( dataSourceRef != null ) {
+                applyDataSource(
+                        dataSourceRef,
+                        true,
+                        integrationSettingsCopy,
+                        mergedSettings
+                );
+
+                // EARLY EXIT!!
+                return;
+            }
+        }
+
         if ( integrationSettingsCopy.containsKey( JPA_NON_JTA_DATASOURCE ) ) {
             final Object dataSourceRef = integrationSettingsCopy.remove( JPA_NON_JTA_DATASOURCE );
@@ -778,6 +830,20 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
                 return;
             }
         }
 
+        if ( integrationSettingsCopy.containsKey( JAKARTA_JPA_NON_JTA_DATASOURCE ) ) {
+            final Object dataSourceRef = integrationSettingsCopy.remove( JAKARTA_JPA_NON_JTA_DATASOURCE );
+
+            applyDataSource(
+                    dataSourceRef,
+                    false,
+                    integrationSettingsCopy,
+                    mergedSettings
+            );
+
+            // EARLY EXIT!!
+            return;
+        }
+
         if ( integrationSettingsCopy.containsKey( URL ) ) {
             // these have precedence over the JPA ones
             final Object integrationJdbcUrl = integrationSettingsCopy.get( URL );
@@ -788,8 +854,10 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
                         NullnessHelper.coalesceSuppliedValues(
                                 () -> ConfigurationHelper.getString( DRIVER, integrationSettingsCopy ),
                                 () -> ConfigurationHelper.getString( JPA_JDBC_DRIVER, integrationSettingsCopy ),
+                                () -> ConfigurationHelper.getString( JAKARTA_JPA_JDBC_DRIVER, integrationSettingsCopy ),
                                 () -> ConfigurationHelper.getString( DRIVER, mergedSettings.configurationValues ),
-                                () -> ConfigurationHelper.getString( JPA_JDBC_DRIVER, mergedSettings.configurationValues )
+                                () -> ConfigurationHelper.getString( JPA_JDBC_DRIVER, mergedSettings.configurationValues ),
+                                () -> ConfigurationHelper.getString( JAKARTA_JPA_JDBC_DRIVER, mergedSettings.configurationValues )
                         ),
                         integrationSettingsCopy,
                         mergedSettings
@@ -820,6 +888,26 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
             }
         }
 
+        if ( integrationSettingsCopy.containsKey( JAKARTA_JPA_JDBC_URL ) ) {
+            final Object integrationJdbcUrl = integrationSettingsCopy.get( JAKARTA_JPA_JDBC_URL );
+
+            if ( integrationJdbcUrl != null ) {
+                //noinspection unchecked
+                applyJdbcSettings(
+                        integrationJdbcUrl,
+                        NullnessHelper.coalesceSuppliedValues(
+                                () -> ConfigurationHelper.getString( JAKARTA_JPA_JDBC_DRIVER, integrationSettingsCopy ),
+                                () -> ConfigurationHelper.getString( JAKARTA_JPA_JDBC_DRIVER, mergedSettings.configurationValues )
+                        ),
+                        integrationSettingsCopy,
+                        mergedSettings
+                );
+
+                // EARLY EXIT!!
+                return;
+            }
+        }
+
         if ( persistenceUnit.getJtaDataSource() != null ) {
             applyDataSource(
                     persistenceUnit.getJtaDataSource(),
@@ -876,6 +964,22 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
             }
         }
 
+        if ( mergedSettings.configurationValues.containsKey( JAKARTA_JPA_JDBC_URL ) ) {
+            final Object url = mergedSettings.configurationValues.get( JAKARTA_JPA_JDBC_URL );
+
+            if ( url != null && ( ! ( url instanceof String ) || StringHelper.isNotEmpty( (String) url ) ) ) {
+                applyJdbcSettings(
+                        url,
+                        ConfigurationHelper.getString( JAKARTA_JPA_JDBC_DRIVER, mergedSettings.configurationValues ),
+                        integrationSettingsCopy,
+                        mergedSettings
+                );
+
+                // EARLY EXIT!!
+                return;
+            }
+        }
+
         // any other conditions to account for?
     }
@@ -896,31 +1000,48 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
         // add to EMF properties (questionable - see HHH-13432)
         final String emfKey;
         final String inverseEmfKey;
+        final String jakartaEmfKey;
+        final String jakartaInverseEmfKey;
         if ( isJta ) {
             emfKey = JPA_JTA_DATASOURCE;
+            jakartaEmfKey = JAKARTA_JPA_JTA_DATASOURCE;
             inverseEmfKey = JPA_NON_JTA_DATASOURCE;
+            jakartaInverseEmfKey = JAKARTA_JPA_NON_JTA_DATASOURCE;
         }
         else {
             emfKey = JPA_NON_JTA_DATASOURCE;
+            jakartaEmfKey = JAKARTA_JPA_NON_JTA_DATASOURCE;
            inverseEmfKey = JPA_JTA_DATASOURCE;
+            jakartaInverseEmfKey = JAKARTA_JPA_JTA_DATASOURCE;
         }
         mergedSettings.configurationValues.put( emfKey, dataSourceRef );
+        mergedSettings.configurationValues.put( jakartaEmfKey, dataSourceRef );
 
         // clear any settings logically overridden by this datasource
         cleanUpConfigKeys(
                 integrationSettingsCopy,
                 mergedSettings,
                 inverseEmfKey,
+                jakartaInverseEmfKey,
                 JPA_JDBC_DRIVER,
+                JAKARTA_JPA_JDBC_DRIVER,
                 DRIVER,
                 JPA_JDBC_URL,
+                JAKARTA_JPA_JDBC_URL,
                 URL
         );
 
 
         // clean-up the entries in the "integration overrides" so they do not get get picked
         // up in the general "integration overrides" handling
-        cleanUpConfigKeys( integrationSettingsCopy, DATASOURCE, JPA_JTA_DATASOURCE, JPA_NON_JTA_DATASOURCE );
+        cleanUpConfigKeys(
+                integrationSettingsCopy,
+                DATASOURCE,
+                JPA_JTA_DATASOURCE,
+                JAKARTA_JPA_JTA_DATASOURCE,
+                JPA_NON_JTA_DATASOURCE,
+                JAKARTA_JPA_NON_JTA_DATASOURCE
+        );
 
         // add under Hibernate's DATASOURCE setting where the ConnectionProvider will find it
         mergedSettings.configurationValues.put( DATASOURCE, dataSourceRef );
@@ -954,14 +1075,17 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
             MergedSettings mergedSettings) {
         mergedSettings.configurationValues.put( URL, url );
         mergedSettings.configurationValues.put( JPA_JDBC_URL, url );
+        mergedSettings.configurationValues.put( JAKARTA_JPA_JDBC_URL, url );
 
         if ( driver != null ) {
             mergedSettings.configurationValues.put( DRIVER, driver );
             mergedSettings.configurationValues.put( JPA_JDBC_DRIVER, driver );
+            mergedSettings.configurationValues.put( JAKARTA_JPA_JDBC_DRIVER, driver );
         }
         else {
             mergedSettings.configurationValues.remove( DRIVER );
             mergedSettings.configurationValues.remove( JPA_JDBC_DRIVER );
+            mergedSettings.configurationValues.remove( JAKARTA_JPA_JDBC_DRIVER );
         }
 
         // clean up the integration-map values
@@ -969,12 +1093,16 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
                 integrationSettingsCopy,
                 DRIVER,
                 JPA_JDBC_DRIVER,
+                JAKARTA_JPA_JDBC_DRIVER,
                 URL,
                 JPA_JDBC_URL,
+                JAKARTA_JPA_JDBC_URL,
                 USER,
                 JPA_JDBC_USER,
+                JAKARTA_JPA_JDBC_USER,
                 PASS,
-                JPA_JDBC_PASSWORD
+                JPA_JDBC_PASSWORD,
+                JAKARTA_JPA_JDBC_PASSWORD
         );
 
         cleanUpConfigKeys(
@@ -982,7 +1110,9 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
                 mergedSettings,
                 DATASOURCE,
                 JPA_JTA_DATASOURCE,
-                JPA_NON_JTA_DATASOURCE
+                JAKARTA_JPA_JTA_DATASOURCE,
+                JPA_NON_JTA_DATASOURCE,
+                JAKARTA_JPA_NON_JTA_DATASOURCE
         );
     }
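For illustration, the NullnessHelper.coalesceSuppliedValues calls above evaluate their suppliers lazily and keep the first non-null result. A minimal stand-alone sketch of that pattern; this is not Hibernate's implementation, just the same idea in plain Java:

import java.util.function.Supplier;

public final class CoalesceExample {

    private CoalesceExample() {
    }

    // Evaluates each supplier in order and returns the first non-null value, or null if all are null.
    @SafeVarargs
    public static <T> T coalesce(Supplier<T>... suppliers) {
        for ( Supplier<T> supplier : suppliers ) {
            final T value = supplier.get();
            if ( value != null ) {
                return value;
            }
        }
        return null;
    }

    public static void main(String[] args) {
        String user = coalesce(
                () -> System.getProperty( "example.user" ), // usually null
                () -> System.getenv( "EXAMPLE_USER" ),      // maybe null
                () -> "fallback-user"                       // last resort
        );
        System.out.println( user );
    }
}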
@@ -269,16 +269,29 @@ public class PersistenceXmlParser {
         if ( integration.containsKey( AvailableSettings.JPA_PERSISTENCE_PROVIDER ) ) {
             persistenceUnit.setProviderClassName( (String) integration.get( AvailableSettings.JPA_PERSISTENCE_PROVIDER ) );
         }
+        else if ( integration.containsKey( AvailableSettings.JAKARTA_JPA_PERSISTENCE_PROVIDER ) ) {
+            persistenceUnit.setProviderClassName( (String) integration.get( AvailableSettings.JAKARTA_JPA_PERSISTENCE_PROVIDER ) );
+        }
         if ( integration.containsKey( AvailableSettings.JPA_TRANSACTION_TYPE ) ) {
             String transactionType = (String) integration.get( AvailableSettings.JPA_TRANSACTION_TYPE );
             persistenceUnit.setTransactionType( parseTransactionType( transactionType ) );
         }
+        else if ( integration.containsKey( AvailableSettings.JAKARTA_JPA_TRANSACTION_TYPE ) ) {
+            String transactionType = (String) integration.get( AvailableSettings.JAKARTA_JPA_TRANSACTION_TYPE );
+            persistenceUnit.setTransactionType( parseTransactionType( transactionType ) );
+        }
         if ( integration.containsKey( AvailableSettings.JPA_JTA_DATASOURCE ) ) {
             persistenceUnit.setJtaDataSource( integration.get( AvailableSettings.JPA_JTA_DATASOURCE ) );
         }
+        else if ( integration.containsKey( AvailableSettings.JAKARTA_JPA_JTA_DATASOURCE ) ) {
+            persistenceUnit.setJtaDataSource( integration.get( AvailableSettings.JAKARTA_JPA_JTA_DATASOURCE ) );
+        }
         if ( integration.containsKey( AvailableSettings.JPA_NON_JTA_DATASOURCE ) ) {
             persistenceUnit.setNonJtaDataSource( integration.get( AvailableSettings.JPA_NON_JTA_DATASOURCE ) );
         }
+        else if ( integration.containsKey( AvailableSettings.JAKARTA_JPA_NON_JTA_DATASOURCE ) ) {
+            persistenceUnit.setNonJtaDataSource( integration.get( AvailableSettings.JAKARTA_JPA_NON_JTA_DATASOURCE ) );
+        }
 
         decodeTransactionType( persistenceUnit );
@@ -101,7 +101,10 @@ public final class ProviderChecker {
         if ( integration == null ) {
             return null;
         }
-        final String setting = (String) integration.get( AvailableSettings.JPA_PERSISTENCE_PROVIDER );
+        String setting = (String) integration.get(AvailableSettings.JPA_PERSISTENCE_PROVIDER);
+        if ( setting == null ) {
+            setting = (String) integration.get(AvailableSettings.JAKARTA_JPA_PERSISTENCE_PROVIDER);
+        }
         return setting == null ? null : setting.trim();
     }
@@ -13,6 +13,8 @@ import javax.persistence.PessimisticLockScope;
 
 import org.hibernate.LockOptions;
 
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_SCOPE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_TIMEOUT;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_SCOPE;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_TIMEOUT;
@@ -31,7 +33,12 @@ public final class LockOptionsHelper {
      * @param lockOptionsSupplier The reference to the lock to modify
      */
     public static void applyPropertiesToLockOptions(final Map<String, Object> props, final Supplier<LockOptions> lockOptionsSupplier) {
-        Object lockScope = props.get( JPA_LOCK_SCOPE );
+        String lockScopeHint = JPA_LOCK_SCOPE;
+        Object lockScope = props.get( lockScopeHint );
+        if ( lockScope == null ) {
+            lockScopeHint = JAKARTA_JPA_LOCK_SCOPE;
+            lockScope = props.get( lockScopeHint );
+        }
         if ( lockScope instanceof String && PessimisticLockScope.valueOf( (String) lockScope ) == PessimisticLockScope.EXTENDED ) {
             lockOptionsSupplier.get().setScope( true );
         }
@@ -40,10 +47,15 @@ public final class LockOptionsHelper {
             lockOptionsSupplier.get().setScope( extended );
         }
         else if ( lockScope != null ) {
-            throw new PersistenceException( "Unable to parse " + JPA_LOCK_SCOPE + ": " + lockScope );
+            throw new PersistenceException( "Unable to parse " + lockScopeHint + ": " + lockScope );
         }
 
-        Object lockTimeout = props.get( JPA_LOCK_TIMEOUT );
+        String timeoutHint = JPA_LOCK_TIMEOUT;
+        Object lockTimeout = props.get( timeoutHint );
+        if (lockTimeout == null) {
+            timeoutHint = JAKARTA_JPA_LOCK_TIMEOUT;
+            lockTimeout = props.get( timeoutHint );
+        }
         int timeout = 0;
         boolean timeoutSet = false;
         if ( lockTimeout instanceof String ) {
@@ -55,7 +67,7 @@ public final class LockOptionsHelper {
             timeoutSet = true;
         }
         else if ( lockTimeout != null ) {
-            throw new PersistenceException( "Unable to parse " + JPA_LOCK_TIMEOUT + ": " + lockTimeout );
+            throw new PersistenceException( "Unable to parse " + timeoutHint + ": " + lockTimeout );
         }
 
         if ( timeoutSet ) {
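For illustration, the helper above is what interprets the properties map passed to JPA lock operations, now accepting either key spelling. A hedged usage sketch using the standard JPA property names; the Person entity is made up and would need to be a mapped class in a real persistence unit:

import java.util.HashMap;
import java.util.Map;

import javax.persistence.EntityManager;
import javax.persistence.LockModeType;

public class LockTimeoutExample {

    // Requests a pessimistic lock with a 5 second timeout, using the Jakarta-style key.
    public static Object findLocked(EntityManager em, Long id) {
        Map<String, Object> props = new HashMap<>();
        props.put( "jakarta.persistence.lock.timeout", 5000 ); // "javax.persistence.lock.timeout" works as well
        return em.find( Person.class, id, LockModeType.PESSIMISTIC_WRITE, props );
    }

    // Placeholder standing in for a real mapped entity.
    public static class Person {
    }
}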
@@ -263,6 +263,7 @@ public abstract class AbstractEntityPersister
     private static final CoreMessageLogger LOG = CoreLogging.messageLogger( AbstractEntityPersister.class );
 
     public static final String ENTITY_CLASS = "class";
+    public static final String VERSION_COLUMN_ALIAS = "version_";
 
     private final String sqlAliasStem;
     private EntityMappingType rootEntityDescriptor;
@@ -2018,7 +2019,7 @@ public abstract class AbstractEntityPersister
         SimpleSelect select = new SimpleSelect( getFactory().getJdbcServices().getDialect() )
                 .setTableName( getVersionedTableName() );
         if ( isVersioned() ) {
-            select.addColumn( versionColumnName );
+            select.addColumn( getVersionColumnName(), VERSION_COLUMN_ALIAS );
         }
         else {
             select.addColumns( rootTableKeyColumnNames );
@@ -2244,7 +2245,7 @@ public abstract class AbstractEntityPersister
                 if ( !isVersioned() ) {
                     return this;
                 }
-                return getVersionType().nullSafeGet( rs, getVersionColumnName(), session, null );
+                return getVersionType().nullSafeGet( rs, VERSION_COLUMN_ALIAS, session, null );
             }
             finally {
                 session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( rs, st );
@@ -871,7 +871,16 @@ public class JoinedSubclassEntityPersister extends AbstractEntityPersister {
     }
 
     public String getSubclassForDiscriminatorValue(Object value) {
-        return subclassesByDiscriminatorValue.get( value );
+        if ( value == null ) {
+            return subclassesByDiscriminatorValue.get( NULL_DISCRIMINATOR );
+        }
+        else {
+            String result = subclassesByDiscriminatorValue.get( value );
+            if ( result == null ) {
+                result = subclassesByDiscriminatorValue.get( NOT_NULL_DISCRIMINATOR );
+            }
+            return result;
+        }
     }
 
     @Override
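For illustration, the discriminator lookup above now honors null and not-null sentinel mappings. A small stand-alone sketch of that lookup rule; the sentinel objects and map contents below are illustrative, not Hibernate's actual NULL_DISCRIMINATOR and NOT_NULL_DISCRIMINATOR constants:

import java.util.HashMap;
import java.util.Map;

public class DiscriminatorLookupExample {

    // Sentinel keys standing in for the "null" and "not null" discriminator markers
    static final Object NULL_DISCRIMINATOR = new Object();
    static final Object NOT_NULL_DISCRIMINATOR = new Object();

    static String subclassFor(Map<Object, String> subclassesByDiscriminatorValue, Object value) {
        if ( value == null ) {
            return subclassesByDiscriminatorValue.get( NULL_DISCRIMINATOR );
        }
        String result = subclassesByDiscriminatorValue.get( value );
        if ( result == null ) {
            // unknown concrete value: fall back to the "not null" mapping if one was declared
            result = subclassesByDiscriminatorValue.get( NOT_NULL_DISCRIMINATOR );
        }
        return result;
    }

    public static void main(String[] args) {
        Map<Object, String> map = new HashMap<>();
        map.put( "A", "com.example.SubclassA" );
        map.put( NOT_NULL_DISCRIMINATOR, "com.example.CatchAll" );
        System.out.println( subclassFor( map, "B" ) ); // prints com.example.CatchAll
    }
}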
@@ -77,6 +77,10 @@ import org.hibernate.transform.ResultTransformer;
 import org.jboss.logging.Logger;
 
 import static org.hibernate.LockOptions.WAIT_FOREVER;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_SCOPE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_LOCK_TIMEOUT;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_JPA_SHARED_CACHE_STORE_MODE;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_SCOPE;
 import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_TIMEOUT;
 import static org.hibernate.cfg.AvailableSettings.JPA_SHARED_CACHE_RETRIEVE_MODE;
@@ -94,6 +98,9 @@ import static org.hibernate.jpa.QueryHints.HINT_LOADGRAPH;
 import static org.hibernate.jpa.QueryHints.HINT_NATIVE_SPACES;
 import static org.hibernate.jpa.QueryHints.HINT_READONLY;
 import static org.hibernate.jpa.QueryHints.HINT_TIMEOUT;
+import static org.hibernate.jpa.QueryHints.JAKARTA_HINT_FETCHGRAPH;
+import static org.hibernate.jpa.QueryHints.JAKARTA_HINT_LOADGRAPH;
+import static org.hibernate.jpa.QueryHints.JAKARTA_SPEC_HINT_TIMEOUT;
 import static org.hibernate.jpa.QueryHints.SPEC_HINT_TIMEOUT;
 
 /**
@@ -905,16 +912,19 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
         if ( queryTimeout != null ) {
             hints.put( HINT_TIMEOUT, queryTimeout );
             hints.put( SPEC_HINT_TIMEOUT, queryTimeout * 1000 );
+            hints.put( JAKARTA_SPEC_HINT_TIMEOUT, queryTimeout * 1000 );
         }
 
         final LockOptions lockOptions = getLockOptions();
         final int lockOptionsTimeOut = lockOptions.getTimeOut();
         if ( lockOptionsTimeOut != WAIT_FOREVER ) {
             hints.put( JPA_LOCK_TIMEOUT, lockOptionsTimeOut );
+            hints.put( JAKARTA_JPA_LOCK_TIMEOUT, lockOptionsTimeOut );
         }
 
         if ( lockOptions.getScope() ) {
             hints.put( JPA_LOCK_SCOPE, lockOptions.getScope() );
+            hints.put( JAKARTA_JPA_LOCK_SCOPE, lockOptions.getScope() );
         }
 
         if ( lockOptions.hasAliasSpecificLockModes() && canApplyAliasSpecificLockModeHints() ) {
@@ -932,9 +942,11 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
 
         final CacheMode cacheMode = getQueryOptions().getCacheMode();
         if ( cacheMode != null ) {
-            hints.put( HINT_CACHE_MODE, cacheMode );
-            hints.put( JPA_SHARED_CACHE_RETRIEVE_MODE, cacheMode.getJpaRetrieveMode() );
-            hints.put( JPA_SHARED_CACHE_STORE_MODE, cacheMode.getJpaStoreMode() );
+            putIfNotNull( hints, HINT_CACHE_MODE, cacheMode );
+            putIfNotNull( hints, JPA_SHARED_CACHE_RETRIEVE_MODE, cacheMode.getJpaRetrieveMode() );
+            putIfNotNull( hints, JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE, cacheMode.getJpaRetrieveMode( ) );
+            putIfNotNull( hints, JPA_SHARED_CACHE_STORE_MODE, cacheMode.getJpaStoreMode() );
+            putIfNotNull( hints, JAKARTA_JPA_SHARED_CACHE_STORE_MODE, cacheMode.getJpaStoreMode() );
         }
 
         if ( isCacheable() ) {
@@ -975,12 +987,12 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
         if ( HINT_TIMEOUT.equals( hintName ) ) {
             applied = applyTimeoutHint( ConfigurationHelper.getInteger( value ) );
         }
-        else if ( SPEC_HINT_TIMEOUT.equals( hintName ) ) {
+        else if ( SPEC_HINT_TIMEOUT.equals( hintName ) || JAKARTA_SPEC_HINT_TIMEOUT.equals( hintName ) ) {
             // convert milliseconds to seconds
             int timeout = (int)Math.round( ConfigurationHelper.getInteger( value ).doubleValue() / 1000.0 );
             applied = applyTimeoutHint( timeout );
         }
-        else if ( JPA_LOCK_TIMEOUT.equals( hintName ) ) {
+        else if ( JPA_LOCK_TIMEOUT.equals( hintName ) || JAKARTA_JPA_LOCK_TIMEOUT.equals( hintName ) ) {
             applied = applyLockTimeoutHint( ConfigurationHelper.getInteger( value ) );
         }
         else if ( HINT_COMMENT.equals( hintName ) ) {
@@ -1004,11 +1016,11 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
         else if ( HINT_CACHE_MODE.equals( hintName ) ) {
             applied = applyCacheModeHint( ConfigurationHelper.getCacheMode( value ) );
         }
-        else if ( JPA_SHARED_CACHE_RETRIEVE_MODE.equals( hintName ) ) {
+        else if ( JPA_SHARED_CACHE_RETRIEVE_MODE.equals( hintName ) || JAKARTA_JPA_SHARED_CACHE_RETRIEVE_MODE.equals( hintName ) ) {
             final CacheRetrieveMode retrieveMode = value != null ? CacheRetrieveMode.valueOf( value.toString() ) : null;
             applied = applyJpaCacheRetrieveMode( retrieveMode );
         }
-        else if ( JPA_SHARED_CACHE_STORE_MODE.equals( hintName ) ) {
+        else if ( JPA_SHARED_CACHE_STORE_MODE.equals( hintName ) || JAKARTA_JPA_SHARED_CACHE_STORE_MODE.equals( hintName ) ) {
             final CacheStoreMode storeMode = value != null ? CacheStoreMode.valueOf( value.toString() ) : null;
             applied = applyJpaCacheStoreMode( storeMode );
         }
@@ -1036,7 +1048,10 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
                 applied = false;
             }
         }
-        else if ( HINT_FETCHGRAPH.equals( hintName ) || HINT_LOADGRAPH.equals( hintName ) ) {
+        else if ( HINT_FETCHGRAPH.equals( hintName )
+                || HINT_LOADGRAPH.equals( hintName )
+                || JAKARTA_HINT_FETCHGRAPH.equals( hintName )
+                || JAKARTA_HINT_LOADGRAPH.equals( hintName ) ) {
             if ( value instanceof RootGraph ) {
                 applyGraph( (RootGraph) value, GraphSemantic.fromJpaHintName( hintName ) );
             }
@@ -60,7 +60,10 @@ public class ManagedBeanRegistryInitiator implements StandardServiceInitiator<Ma
         // simplified CDI support
 
         final boolean isCdiAvailable = isCdiAvailable( classLoaderService );
-        final Object beanManagerRef = cfgSvc.getSettings().get( AvailableSettings.CDI_BEAN_MANAGER );
+        Object beanManagerRef = cfgSvc.getSettings().get( AvailableSettings.CDI_BEAN_MANAGER );
+        if ( beanManagerRef == null ) {
+            beanManagerRef = cfgSvc.getSettings().get( AvailableSettings.JAKARTA_CDI_BEAN_MANAGER );
+        }
         if ( beanManagerRef != null ) {
             if ( !isCdiAvailable ) {
                 BeansMessageLogger.BEANS_MESSAGE_LOGGER.beanManagerButCdiNotAvailable( beanManagerRef );
@@ -126,7 +129,12 @@ public class ManagedBeanRegistryInitiator implements StandardServiceInitiator<Ma
     }
 
     public static Class cdiBeanManagerClass(ClassLoaderService classLoaderService) throws ClassLoadingException {
-        return classLoaderService.classForName( "javax.enterprise.inject.spi.BeanManager" );
+        try {
+            return classLoaderService.classForName( "javax.enterprise.inject.spi.BeanManager" );
+        }
+        catch (ClassLoadingException e) {
+            return classLoaderService.classForName( "jakarta.enterprise.inject.spi.BeanManager" );
+        }
    }
 
 }
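For illustration, the try/catch above is the usual pattern for probing the javax class first and the jakarta one second; a plain-JDK sketch of the same idea, independent of Hibernate's ClassLoaderService:

public final class BeanManagerClassProbe {

    private BeanManagerClassProbe() {
    }

    // Tries the legacy CDI BeanManager class first, then the Jakarta one.
    public static Class<?> locateBeanManagerClass() throws ClassNotFoundException {
        try {
            return Class.forName( "javax.enterprise.inject.spi.BeanManager" );
        }
        catch (ClassNotFoundException e) {
            return Class.forName( "jakarta.enterprise.inject.spi.BeanManager" );
        }
    }
}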
@@ -480,15 +480,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
      * Find existing keys based on referencing column and referencedTable. "referencedColumnName" is not checked
      * because that always is the primary key of the "referencedTable".
      */
-        Predicate<ColumnReferenceMapping> mappingPredicate = m -> {
-            String existingReferencingColumn = m.getReferencingColumnMetadata().getColumnIdentifier().getText();
-            String existingReferencedTable = m.getReferencedColumnMetadata().getContainingTableInformation().getName().getTableName().getCanonicalName();
-            return referencingColumn.equals( existingReferencingColumn ) && referencedTable.equals( existingReferencedTable );
-        };
-        Stream<ForeignKeyInformation> keyStream = StreamSupport.stream( tableInformation.getForeignKeys().spliterator(), false );
-        Stream<ColumnReferenceMapping> mappingStream = keyStream.flatMap( k -> StreamSupport.stream( k.getColumnReferenceMappings().spliterator(), false ) );
-        boolean found = mappingStream.anyMatch( mappingPredicate );
-        if ( found ) {
+        if (equivalentForeignKeyExistsInDatabase(tableInformation, referencingColumn, referencedTable)) {
             return true;
         }
 
@@ -497,6 +489,17 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
         return tableInformation.getForeignKey( Identifier.toIdentifier( foreignKey.getName() ) ) != null;
     }
 
+    boolean equivalentForeignKeyExistsInDatabase(TableInformation tableInformation, String referencingColumn, String referencedTable) {
+        Predicate<ColumnReferenceMapping> mappingPredicate = m -> {
+            String existingReferencingColumn = m.getReferencingColumnMetadata().getColumnIdentifier().getText();
+            String existingReferencedTable = m.getReferencedColumnMetadata().getContainingTableInformation().getName().getTableName().getCanonicalName();
+            return referencingColumn.equalsIgnoreCase( existingReferencingColumn ) && referencedTable.equalsIgnoreCase( existingReferencedTable );
+        };
+        Stream<ForeignKeyInformation> keyStream = StreamSupport.stream( tableInformation.getForeignKeys().spliterator(), false );
+        Stream<ColumnReferenceMapping> mappingStream = keyStream.flatMap( k -> StreamSupport.stream( k.getColumnReferenceMappings().spliterator(), false ) );
+        return mappingStream.anyMatch( mappingPredicate );
+    }
+
     protected void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
         final String exportIdentifier = exportable.getExportIdentifier();
         if ( exportIdentifiers.contains( exportIdentifier ) ) {
@@ -12,6 +12,7 @@ import java.io.Writer;
 import java.net.URL;
 import java.sql.SQLException;
 import java.util.Map;
+import java.util.regex.Pattern;
 
 import org.hibernate.boot.model.relational.Namespace;
 import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
@@ -24,6 +25,8 @@ import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
 import org.hibernate.service.ServiceRegistry;
 import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
 import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
+import org.hibernate.tool.schema.internal.exec.AbstractScriptSourceInput;
+import org.hibernate.tool.schema.internal.exec.ScriptSourceInputAggregate;
 import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromFile;
 import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromReader;
 import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
@@ -41,6 +44,7 @@ import org.hibernate.tool.schema.spi.ScriptTargetOutput;
 public class Helper {
 
 	private static final CoreMessageLogger log = CoreLogging.messageLogger( Helper.class );
+	private static final Pattern COMMA_PATTERN = Pattern.compile( "\\s*,\\s*" );
 
 	public static ScriptSourceInput interpretScriptSourceSetting(
 			Object scriptSourceSetting,
@@ -53,24 +57,40 @@ public class Helper {
 			final String scriptSourceSettingString = scriptSourceSetting.toString();
 			log.debugf( "Attempting to resolve script source setting : %s", scriptSourceSettingString );
 
-			// setting could be either:
-			// 1) string URL representation (i.e., "file://...")
-			// 2) relative file path (resource lookup)
-			// 3) absolute file path
-
-			log.trace( "Trying as URL..." );
-			// ClassLoaderService.locateResource() first tries the given resource name as url form...
-			final URL url = classLoaderService.locateResource( scriptSourceSettingString );
-			if ( url != null ) {
-				return new ScriptSourceInputFromUrl( url, charsetName );
-			}
-
-			// assume it is a File path
-			final File file = new File( scriptSourceSettingString );
-			return new ScriptSourceInputFromFile( file, charsetName );
+			final String[] paths = COMMA_PATTERN.split( scriptSourceSettingString );
+			if ( paths.length == 1 ) {
+				return interpretScriptSourceSetting( scriptSourceSettingString, classLoaderService, charsetName );
+			}
+			final AbstractScriptSourceInput[] inputs = new AbstractScriptSourceInput[paths.length];
+			for ( int i = 0; i < paths.length; i++ ) {
+				inputs[i] = interpretScriptSourceSetting( paths[i], classLoaderService, charsetName ) ;
+			}
+
+			return new ScriptSourceInputAggregate( inputs );
 		}
 	}
 
+	private static AbstractScriptSourceInput interpretScriptSourceSetting(
+			String scriptSourceSettingString,
+			ClassLoaderService classLoaderService,
+			String charsetName) {
+		// setting could be either:
+		// 1) string URL representation (i.e., "file://...")
+		// 2) relative file path (resource lookup)
+		// 3) absolute file path
+
+		log.trace( "Trying as URL..." );
+		// ClassLoaderService.locateResource() first tries the given resource name as url form...
+		final URL url = classLoaderService.locateResource( scriptSourceSettingString );
+		if ( url != null ) {
+			return new ScriptSourceInputFromUrl( url, charsetName );
+		}
+
+		// assume it is a File path
+		final File file = new File( scriptSourceSettingString );
+		return new ScriptSourceInputFromFile( file, charsetName );
+	}
+
 	public static ScriptTargetOutput interpretScriptTargetSetting(
 			Object scriptTargetSetting,
 			ClassLoaderService classLoaderService,
@@ -109,6 +129,9 @@ public class Helper {
 		if ( configurationValues.containsKey( AvailableSettings.HBM2DDL_CREATE_SCHEMAS ) ) {
 			count++;
 		}
+		if ( configurationValues.containsKey( AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCHEMAS ) ) {
+			count++;
+		}
 		if ( configurationValues.containsKey( AvailableSettings.HBM2DDL_CREATE_NAMESPACES ) ) {
 			count++;
 		}
@@ -122,15 +145,20 @@ public class Helper {
 		return ConfigurationHelper.getBoolean(
 				AvailableSettings.HBM2DDL_CREATE_SCHEMAS,
 				configurationValues,
-				//Then try the Hibernate ORM setting:
+				//Then try the Jakarta JPA setting:
 				ConfigurationHelper.getBoolean(
-						AvailableSettings.HBM2DDL_CREATE_NAMESPACES,
+						AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCHEMAS,
 						configurationValues,
-						//And finally fall back to the old name this had before we fixed the typo:
+						//Then try the Hibernate ORM setting:
 						ConfigurationHelper.getBoolean(
-								AvailableSettings.HBM2DLL_CREATE_NAMESPACES,
+								AvailableSettings.HBM2DDL_CREATE_NAMESPACES,
 								configurationValues,
-								false
+								//And finally fall back to the old name this had before we fixed the typo:
+								ConfigurationHelper.getBoolean(
+										AvailableSettings.HBM2DLL_CREATE_NAMESPACES,
+										configurationValues,
+										false
+								)
 						)
 				)
 		);

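To see what the new COMMA_PATTERN split actually does with a multi-script setting, here is a small hedged sketch. The file names are made up; only the "\s*,\s*" pattern and the "one path keeps a single input, several paths become an aggregate" decision are taken from the Helper change above.

import java.util.regex.Pattern;

public class ScriptSettingSplitSketch {

	private static final Pattern COMMA_PATTERN = Pattern.compile( "\\s*,\\s*" );

	public static void main(String[] args) {
		// Hypothetical setting value; per the patched Helper, whitespace around commas is ignored.
		String setting = "import/create-script-source.sql , import/create-script-source2.sql";

		String[] paths = COMMA_PATTERN.split( setting );
		System.out.println( paths.length );   // 2 -> Helper would build a ScriptSourceInputAggregate
		System.out.println( paths[0] );       // import/create-script-source.sql
		System.out.println( paths[1] );       // import/create-script-source2.sql

		String single = "import/create-script-source.sql";
		System.out.println( COMMA_PATTERN.split( single ).length ); // 1 -> single ScriptSourceInput, as before
	}
}
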
@@ -46,6 +46,7 @@ import org.jboss.logging.Logger;
 
 import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CONNECTION;
 import static org.hibernate.cfg.AvailableSettings.HBM2DDL_DELIMITER;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_CONNECTION;
 
 /**
  * The standard Hibernate implementation for performing schema management.
@@ -196,13 +197,35 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
 		if ( providedConnection != null ) {
 			jdbcContextBuilder.jdbcConnectionAccess = new JdbcConnectionAccessProvidedConnectionImpl( providedConnection );
 		}
+		else {
+			final Connection jakartaProvidedConnection = (Connection) configurationValues.get( JAKARTA_HBM2DDL_CONNECTION );
+			if ( jakartaProvidedConnection != null ) {
+				jdbcContextBuilder.jdbcConnectionAccess = new JdbcConnectionAccessProvidedConnectionImpl( jakartaProvidedConnection );
+			}
+		}
 
 		// see if a specific Dialect override has been provided...
-		final String explicitDbName = (String) configurationValues.get( AvailableSettings.DIALECT_DB_NAME );
+		String dbName = (String) configurationValues.get( AvailableSettings.DIALECT_DB_NAME );
+		if ( dbName == null ) {
+			dbName = (String) configurationValues.get( AvailableSettings.JAKARTA_HBM2DDL_DB_NAME );
+		}
+		final String explicitDbName = dbName;
 		if ( StringHelper.isNotEmpty( explicitDbName ) ) {
-			final String explicitDbVersion = (String) configurationValues.get( AvailableSettings.DIALECT_DB_VERSION );
-			final String explicitDbMajor = (String) configurationValues.get( AvailableSettings.DIALECT_DB_MAJOR_VERSION );
-			final String explicitDbMinor = (String) configurationValues.get( AvailableSettings.DIALECT_DB_MINOR_VERSION );
+			String dbVersion = (String) configurationValues.get( AvailableSettings.DIALECT_DB_VERSION );
+			if ( dbVersion == null ) {
+				dbVersion = (String) configurationValues.get( AvailableSettings.JAKARTA_DIALECT_DB_VERSION );
+			}
+			String dbMajor = (String) configurationValues.get( AvailableSettings.DIALECT_DB_MAJOR_VERSION );
+			if ( dbMajor == null ) {
+				dbMajor = (String) configurationValues.get( AvailableSettings.JAKARTA_HBM2DDL_DB_MAJOR_VERSION );
+			}
+			String dbMinor = (String) configurationValues.get( AvailableSettings.DIALECT_DB_MINOR_VERSION );
+			if ( dbMinor == null ) {
+				dbMinor = (String) configurationValues.get( AvailableSettings.JAKARTA_HBM2DDL_DB_MINOR_VERSION );
+			}
+			final String explicitDbVersion = dbVersion;
+			final String explicitDbMajor = dbMajor;
+			final String explicitDbMinor = dbMinor;
 
 			final Dialect indicatedDialect = serviceRegistry.getService( DialectResolver.class ).resolveDialect(
 					new DialectResolutionInfo() {

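The connection and dialect lookups above all follow the same "legacy key first, then the Jakarta key" shape. A sketch of that lookup factored into a tiny helper; this helper is illustrative only, it is not part of the patch, and the two key names passed in would be the corresponding AvailableSettings constants:

import java.util.HashMap;
import java.util.Map;

public class SettingFallbackSketch {

	// Return the value of the first key that is present; mirrors the repeated
	// "if ( value == null ) { value = configurationValues.get( JAKARTA_... ); }" blocks above.
	static Object getFirstPresent(Map<?, ?> configurationValues, String... keys) {
		for ( String key : keys ) {
			Object value = configurationValues.get( key );
			if ( value != null ) {
				return value;
			}
		}
		return null;
	}

	public static void main(String[] args) {
		Map<String, Object> config = new HashMap<>();
		// Hypothetical key names, standing in for the legacy and Jakarta AvailableSettings constants.
		config.put( "jakarta.example.db-name", "PostgreSQL" );

		Object dbName = getFirstPresent( config, "legacy.example.db-name", "jakarta.example.db-name" );
		System.out.println( dbName ); // PostgreSQL, because only the Jakarta-style key was set
	}
}
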
@@ -63,6 +63,7 @@ import org.hibernate.tool.schema.spi.TargetDescriptor;
 
 import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CHARSET_NAME;
 import static org.hibernate.cfg.AvailableSettings.HBM2DDL_LOAD_SCRIPT_SOURCE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE;
 import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting;
 
 /**
@@ -506,7 +507,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
 		//final Formatter formatter = format ? DDLFormatterImpl.INSTANCE : FormatStyle.NONE.getFormatter();
 		final Formatter formatter = FormatStyle.NONE.getFormatter();
 
-		final Object importScriptSetting = options.getConfigurationValues().get( HBM2DDL_LOAD_SCRIPT_SOURCE );
+		Object importScriptSetting = options.getConfigurationValues().get( HBM2DDL_LOAD_SCRIPT_SOURCE );
+		if ( importScriptSetting == null ) {
+			importScriptSetting = options.getConfigurationValues().get( JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE );
+		}
 		String charsetName = (String) options.getConfigurationValues().get( HBM2DDL_CHARSET_NAME );
 
 		if ( importScriptSetting != null ) {

@@ -0,0 +1,69 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.internal.exec;

import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;

import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.spi.ScriptSourceInput;

/**
 * A script source input that aggregates over multiple other {@link ScriptSourceInput}.
 *
 * @author Christian Beikov
 */
public class ScriptSourceInputAggregate implements ScriptSourceInput {

	private static final CoreMessageLogger log = CoreLogging.messageLogger( SchemaCreatorImpl.class );

	private final AbstractScriptSourceInput[] inputs;

	/**
	 * Constructs a ScriptSourceInputAggregate
	 *
	 * @param inputs The script source inputs
	 */
	public ScriptSourceInputAggregate(AbstractScriptSourceInput[] inputs) {
		this.inputs = inputs;
	}

	@Override
	public List<String> extract(Function<Reader, List<String>> extractor) {

		final List<String>[] lists = new List[inputs.length];
		int size = 0;
		for ( int i = 0; i < inputs.length; i++ ) {
			final AbstractScriptSourceInput scriptSourceInput = inputs[i];
			final Reader reader = scriptSourceInput.prepareReader();
			try {
				log.executingImportScript( scriptSourceInput.getScriptDescription() );
				lists[i] = extractor.apply( reader );
				size += lists[i].size();
			}
			finally {
				scriptSourceInput.releaseReader( reader );
			}
		}
		final List<String> list = new ArrayList<>( size );
		for ( List<String> strings : lists ) {
			list.addAll( strings );
		}

		return list;
	}

	@Override
	public String toString() {
		return "ScriptSourceInputAggregate(" + Arrays.toString( inputs ) + ")";
	}
}

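The extract loop above reads each input in order and concatenates the per-script statement lists. Below is a standalone sketch of that aggregation contract, using plain Readers and a toy statement extractor instead of the AbstractScriptSourceInput SPI; all names in the sketch are illustrative, not ORM API.

import java.io.BufferedReader;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;

public class AggregateExtractSketch {

	// Toy extractor: one statement per line, mirroring the Function<Reader, List<String>> shape.
	static final Function<Reader, List<String>> EXTRACTOR = reader -> {
		List<String> statements = new ArrayList<>();
		new BufferedReader( reader ).lines().forEach( statements::add );
		return statements;
	};

	// Same contract as ScriptSourceInputAggregate#extract: run the extractor per input,
	// then append the partial lists in input order.
	static List<String> extractAll(List<Reader> inputs, Function<Reader, List<String>> extractor) {
		List<String> all = new ArrayList<>();
		for ( Reader reader : inputs ) {
			all.addAll( extractor.apply( reader ) );
		}
		return all;
	}

	public static void main(String[] args) {
		List<Reader> scripts = Arrays.asList(
				new StringReader( "insert into Item values ('a');" ),
				new StringReader( "insert into Item values ('b');" )
		);
		// [insert into Item values ('a');, insert into Item values ('b');] -- order is preserved
		System.out.println( extractAll( scripts, EXTRACTOR ) );
	}
}
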
@@ -13,7 +13,6 @@ import java.io.InputStreamReader;
 import java.io.Reader;
 
 import org.hibernate.tool.schema.spi.SchemaManagementException;
-import org.hibernate.tool.schema.spi.ScriptSourceInput;
 
 import org.jboss.logging.Logger;
 
@@ -22,7 +21,7 @@ import org.jboss.logging.Logger;
  *
  * @author Steve Ebersole
  */
-public class ScriptSourceInputFromFile extends AbstractScriptSourceInput implements ScriptSourceInput {
+public class ScriptSourceInputFromFile extends AbstractScriptSourceInput {
 	private static final Logger log = Logger.getLogger( ScriptSourceInputFromFile.class );
 
 	private final File file;

@@ -8,14 +8,12 @@ package org.hibernate.tool.schema.internal.exec;
 
 import java.io.Reader;
 
-import org.hibernate.tool.schema.spi.ScriptSourceInput;
-
 /**
 * ScriptSourceInput implementation for explicitly given Readers.
 *
 * @author Steve Ebersole
 */
-public class ScriptSourceInputFromReader extends AbstractScriptSourceInput implements ScriptSourceInput {
+public class ScriptSourceInputFromReader extends AbstractScriptSourceInput {
 	private final Reader reader;
 
 	/**

@@ -12,7 +12,6 @@ import java.io.Reader;
 import java.net.URL;
 
 import org.hibernate.tool.schema.spi.SchemaManagementException;
-import org.hibernate.tool.schema.spi.ScriptSourceInput;
 
 import org.jboss.logging.Logger;
 
@@ -22,7 +21,7 @@ import org.jboss.logging.Logger;
 * @author Christian Beikov
 * @author Steve Ebersole
 */
-public class ScriptSourceInputFromUrl extends AbstractScriptSourceInput implements ScriptSourceInput {
+public class ScriptSourceInputFromUrl extends AbstractScriptSourceInput {
 	private static final Logger log = Logger.getLogger( ScriptSourceInputFromFile.class );
 
 	private final URL url;

@@ -18,12 +18,27 @@ import org.hibernate.tool.schema.spi.ScriptSourceInput;
 *
 * @author Steve Ebersole
 */
-public class ScriptSourceInputNonExistentImpl implements ScriptSourceInput {
+public class ScriptSourceInputNonExistentImpl extends AbstractScriptSourceInput {
 	/**
 	 * Singleton access
 	 */
 	public static final ScriptSourceInputNonExistentImpl INSTANCE = new ScriptSourceInputNonExistentImpl();
 
+	@Override
+	protected String getScriptDescription() {
+		return "[injected ScriptSourceInputNonExistentImpl script]";
+	}
+
+	@Override
+	protected Reader prepareReader() {
+		return null;
+	}
+
+	@Override
+	protected void releaseReader(Reader reader) {
+
+	}
+
 	@Override
 	public List<String> extract(Function<Reader, List<String>> extractor) {
 		return Collections.emptyList();

@@ -39,6 +39,14 @@ import static org.hibernate.cfg.AvailableSettings.HBM2DDL_DROP_SOURCE;
 import static org.hibernate.cfg.AvailableSettings.HBM2DDL_SCRIPTS_ACTION;
 import static org.hibernate.cfg.AvailableSettings.HBM2DDL_SCRIPTS_CREATE_TARGET;
 import static org.hibernate.cfg.AvailableSettings.HBM2DDL_SCRIPTS_DROP_TARGET;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCRIPT_SOURCE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_CREATE_SOURCE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_DROP_SCRIPT_SOURCE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_DROP_SOURCE;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET;
+import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_DROP_TARGET;
 
 /**
 * Responsible for coordinating SchemaManagementTool execution(s) for auto-tooling whether
@@ -491,17 +499,29 @@ public class SchemaManagementToolCoordinator {
 
 		@Override
 		public Object getSourceTypeSetting(Map<?,?> configurationValues) {
-			return configurationValues.get( HBM2DDL_CREATE_SOURCE );
+			Object setting = configurationValues.get( HBM2DDL_CREATE_SOURCE );
+			if ( setting == null ) {
+				setting = configurationValues.get( JAKARTA_HBM2DDL_CREATE_SOURCE );
+			}
+			return setting;
 		}
 
 		@Override
 		public Object getScriptSourceSetting(Map<?,?> configurationValues) {
-			return configurationValues.get( HBM2DDL_CREATE_SCRIPT_SOURCE );
+			Object setting = configurationValues.get( HBM2DDL_CREATE_SCRIPT_SOURCE );
+			if ( setting == null ) {
+				setting = configurationValues.get( JAKARTA_HBM2DDL_CREATE_SCRIPT_SOURCE );
+			}
+			return setting;
 		}
 
 		@Override
 		public Object getScriptTargetSetting(Map<?,?> configurationValues) {
-			return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
+			Object setting = configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
+			if ( setting == null ) {
+				setting = configurationValues.get( JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET );
+			}
+			return setting;
 		}
 	}
 
@@ -513,17 +533,29 @@ public class SchemaManagementToolCoordinator {
 
 		@Override
 		public Object getSourceTypeSetting(Map<?,?> configurationValues) {
-			return configurationValues.get( HBM2DDL_DROP_SOURCE );
+			Object setting = configurationValues.get( HBM2DDL_DROP_SOURCE );
+			if ( setting == null ) {
+				setting = configurationValues.get( JAKARTA_HBM2DDL_DROP_SOURCE );
+			}
+			return setting;
 		}
 
 		@Override
 		public Object getScriptSourceSetting(Map<?,?> configurationValues) {
-			return configurationValues.get( HBM2DDL_DROP_SCRIPT_SOURCE );
+			Object setting = configurationValues.get( HBM2DDL_DROP_SCRIPT_SOURCE );
+			if ( setting == null ) {
+				setting = configurationValues.get( JAKARTA_HBM2DDL_DROP_SCRIPT_SOURCE );
+			}
+			return setting;
 		}
 
 		@Override
 		public Object getScriptTargetSetting(Map<?,?> configurationValues) {
-			return configurationValues.get( HBM2DDL_SCRIPTS_DROP_TARGET );
+			Object setting = configurationValues.get( HBM2DDL_SCRIPTS_DROP_TARGET );
+			if ( setting == null ) {
+				setting = configurationValues.get( JAKARTA_HBM2DDL_SCRIPTS_DROP_TARGET );
+			}
+			return setting;
 		}
 	}
 
@@ -551,7 +583,11 @@ public class SchemaManagementToolCoordinator {
 		@Override
 		public Object getScriptTargetSetting(Map<?,?> configurationValues) {
 			// for now, reuse the CREATE script target setting
-			return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
+			Object setting = configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
+			if ( setting == null ) {
+				setting = configurationValues.get( JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET );
+			}
+			return setting;
 		}
 	}
 
@@ -588,9 +624,17 @@ public class SchemaManagementToolCoordinator {
 	 */
 	@Internal
 	public static ActionGrouping interpret(Map configurationValues) {
+		Object databaseActionSetting = configurationValues.get( HBM2DDL_DATABASE_ACTION );
+		Object scriptsActionSetting = configurationValues.get( HBM2DDL_SCRIPTS_ACTION );
+		if ( databaseActionSetting == null ) {
+			databaseActionSetting = configurationValues.get( JAKARTA_HBM2DDL_DATABASE_ACTION );
+		}
+		if ( scriptsActionSetting == null ) {
+			scriptsActionSetting = configurationValues.get( JAKARTA_HBM2DDL_SCRIPTS_ACTION );
+		}
 		// interpret the JPA settings first
-		Action databaseAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_DATABASE_ACTION ) );
-		Action scriptAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_SCRIPTS_ACTION ) );
+		Action databaseAction = Action.interpretJpaSetting( databaseActionSetting );
+		Action scriptAction = Action.interpretJpaSetting( scriptsActionSetting );
 
 		// if no JPA settings were specified, look at the legacy HBM2DDL_AUTO setting...
 		if ( databaseAction == Action.NONE && scriptAction == Action.NONE ) {

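With the fallbacks above, a configuration that only uses the Jakarta-prefixed constants is now picked up by ActionGrouping.interpret and the source/target providers. A hedged sketch of such a settings map; the constant names come from the imports in this patch, the action values are ordinary JPA schema-generation values, and the target path is made up:

import java.util.HashMap;
import java.util.Map;

import org.hibernate.cfg.AvailableSettings;

public class JakartaSchemaGenSettingsSketch {

	public static void main(String[] args) {
		Map<String, Object> settings = new HashMap<>();

		// Only the Jakarta variants are set; per the patched coordinator these are read
		// whenever the corresponding legacy HBM2DDL_* keys are absent.
		settings.put( AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, "drop-and-create" );
		settings.put( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, "create" );
		settings.put( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET, "target/create.sql" );

		// Passed to EntityManagerFactory/SessionFactory bootstrap as integration settings,
		// SchemaManagementToolCoordinator.ActionGrouping.interpret( settings ) would now
		// resolve a database action and a script action from these keys.
		settings.forEach( (k, v) -> System.out.println( k + " = " + v ) );
	}
}
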
@@ -45,10 +45,6 @@ public class JpaFileSchemaGeneratorWithHbm2DdlCharsetNameTest extends JpaSchemaG
 		return toFilePath(super.getDropSqlScript());
 	}
 
-	protected String toFilePath(String relativePath) {
-		return Thread.currentThread().getContextClassLoader().getResource( relativePath ).getFile();
-	}
-
 	@Override
 	protected String getResourceUrlString(String resource) {
 		return resource;

@@ -22,9 +22,11 @@ import org.hibernate.event.spi.EventType;
 import org.hibernate.integrator.spi.Integrator;
 import org.hibernate.service.spi.SessionFactoryServiceRegistry;
 
+import org.hibernate.testing.TestForIssue;
 import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
 import org.junit.Test;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertEquals;
 
 
@@ -33,6 +35,7 @@ import static org.junit.Assert.assertEquals;
 *
 * @author Steve Ebersole
 */
+@TestForIssue(jiraKey = {"HHH-2884", "HHH-10674", "HHH-14541"})
 public class CallbackTest extends BaseCoreFunctionalTestCase {
 	private TestingObserver observer = new TestingObserver();
 	private TestingListener listener = new TestingListener();
@@ -97,16 +100,22 @@ public class CallbackTest extends BaseCoreFunctionalTestCase {
 		private int closedCount = 0;
 		private int closingCount = 0;
 
+		@Override
 		public void sessionFactoryCreated(SessionFactory factory) {
+			assertThat( factory.isClosed() ).isFalse();
 			creationCount++;
 		}
 
 		@Override
 		public void sessionFactoryClosing(SessionFactory factory) {
+			// Test for HHH-14541
+			assertThat( factory.isClosed() ).isFalse();
 			closingCount++;
 		}
 
+		@Override
 		public void sessionFactoryClosed(SessionFactory factory) {
+			assertThat( factory.isClosed() ).isTrue();
 			closedCount++;
 		}
 	}

@@ -30,10 +30,6 @@ public class JpaFileSchemaGeneratorTest extends JpaSchemaGeneratorTest {
 		return toFilePath(super.getDropSqlScript());
 	}
 
-	protected String toFilePath(String relativePath) {
-		return Thread.currentThread().getContextClassLoader().getResource( relativePath ).getFile();
-	}
-
 	@Override
 	protected String getResourceUrlString(String resource) {
 		return resource;

@@ -8,6 +8,7 @@ package org.hibernate.orm.test.jpa.schemagen;
 
 import java.net.URL;
 import java.util.Map;
+import java.util.function.Function;
 import javax.persistence.EntityManager;
 import javax.persistence.EntityManagerFactory;
 
@@ -19,6 +20,7 @@ import org.hibernate.jpa.boot.spi.EntityManagerFactoryBuilder;
 import org.hibernate.testing.TestForIssue;
 import org.hibernate.testing.orm.junit.EntityManagerFactoryBasedFunctionalTest;
 import org.hibernate.testing.orm.junit.RequiresDialect;
+import org.junit.Assert;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
@@ -28,9 +30,9 @@ import org.junit.jupiter.api.Test;
 @RequiresDialect( H2Dialect.class )
 public class JpaSchemaGeneratorTest extends EntityManagerFactoryBasedFunctionalTest {
 
-	private final String LOAD_SQL = getScriptFolderPath() + "load-script-source.sql";
-	private final String CREATE_SQL = getScriptFolderPath() + "create-script-source.sql";
-	private final String DROP_SQL = getScriptFolderPath() + "drop-script-source.sql";
+	private final String LOAD_SQL = getScriptFolderPath() + "load-script-source.sql , " + getScriptFolderPath() + "load-script-source2.sql";
+	private final String CREATE_SQL = getScriptFolderPath() + "create-script-source.sql , " + getScriptFolderPath() + "create-script-source2.sql";
+	private final String DROP_SQL = getScriptFolderPath() + "drop-script-source.sql , " + getScriptFolderPath() + "drop-script-source2.sql";
 
 	private static int schemagenNumber = 0;
 
@@ -119,12 +121,29 @@ public class JpaSchemaGeneratorTest extends EntityManagerFactoryBasedFunctionalT
 		return "sch" + (char) 233 + "magen-test";
 	}
 
-	protected String getResourceUrlString(String resource) {
-		final URL url = getClass().getClassLoader().getResource( resource );
-		if ( url == null ) {
-			throw new RuntimeException( "Unable to locate requested resource [" + resource + "]" );
-		}
-		return url.toString();
+	protected String getResourceUrlString(String string) {
+		return getResourceUrlString( getClass().getClassLoader(), string, URL::toString );
+	}
+
+	protected String getResourceUrlString(ClassLoader classLoader, String string, Function<URL, String> transformer) {
+		final String[] strings = string.split( "\\s*,\\s*" );
+		final StringBuilder sb = new StringBuilder( string.length() );
+		for ( int i = 0; i < strings.length; i++ ) {
+			if ( i != 0 ) {
+				sb.append( ',' );
+			}
+			final String resource = strings[i];
+			final URL url = classLoader.getResource( resource );
+			if ( url == null ) {
+				throw new RuntimeException( "Unable to locate requested resource [" + resource + "]" );
+			}
+			sb.append( transformer.apply( url ) );
+		}
+		return sb.toString();
+	}
+
+	protected String toFilePath(String relativePath) {
+		return getResourceUrlString( Thread.currentThread().getContextClassLoader(), relativePath, URL::getFile );
 	}
 
 	private void doTest(Map<Object, Object> settings) {
@@ -141,6 +160,7 @@ public class JpaSchemaGeneratorTest extends EntityManagerFactoryBasedFunctionalT
 		EntityManager em = emf.createEntityManager();
 		try {
 			Assertions.assertNotNull( em.find( Item.class, encodedName() ) );
+			Assert.assertNotNull( em.find( Item.class, "multi-file-test" ) );
 		}
 		finally {
 			em.close();

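The updated test feeds comma-separated script lists through the standard load/create/drop source settings. A minimal sketch of configuring two load scripts the same way in application code; the file names and the UTF-8 charset are assumptions for the example, while the comma-separated form is exactly what the patched Helper splits:

import java.util.HashMap;
import java.util.Map;

import org.hibernate.cfg.AvailableSettings;

public class MultiScriptLoadSettingsSketch {

	public static void main(String[] args) {
		Map<String, Object> properties = new HashMap<>();

		// Two import scripts, applied in the order they are listed.
		properties.put(
				AvailableSettings.HBM2DDL_LOAD_SCRIPT_SOURCE,
				"import/load-script-source.sql , import/load-script-source2.sql"
		);
		properties.put( AvailableSettings.HBM2DDL_CHARSET_NAME, "UTF-8" );

		// These properties would be passed to the EntityManagerFactory bootstrap together with
		// a schema-generation database action that triggers the import.
		properties.forEach( (k, v) -> System.out.println( k + " = " + v ) );
	}
}
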
@@ -0,0 +1,80 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
 */
package org.hibernate.test.bytecode.enhancement.lazy;

import javax.persistence.Basic;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.Hibernate;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;

import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.bytecode.enhancement.EnhancementOptions;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

@TestForIssue(jiraKey = "HHH-14571")
@RunWith(BytecodeEnhancerRunner.class)
@EnhancementOptions(lazyLoading = true, extendedEnhancement = true)
public class IdInUninitializedProxyTest extends BaseNonConfigCoreFunctionalTestCase {

	@Override
	protected Class[] getAnnotatedClasses() {
		return new Class[] { AnEntity.class };
	}

	@Override
	protected void configureStandardServiceRegistryBuilder(StandardServiceRegistryBuilder ssrb) {
		super.configureStandardServiceRegistryBuilder( ssrb );
		ssrb.applySetting( AvailableSettings.ALLOW_ENHANCEMENT_AS_PROXY, true );
	}

	@Test
	public void testIdIsAlwaysConsideredInitialized() {
		inTransaction( session -> {
			final AnEntity e = session.byId( AnEntity.class ).getReference( 1 );
			assertFalse( Hibernate.isInitialized( e ) );
			// This is the gist of the problem
			assertTrue( Hibernate.isPropertyInitialized( e, "id" ) );
			assertFalse( Hibernate.isPropertyInitialized( e, "name" ) );

			assertEquals( "George", e.name );
			assertTrue( Hibernate.isInitialized( e ) );
			assertTrue( Hibernate.isPropertyInitialized( e, "id" ) );
			assertTrue( Hibernate.isPropertyInitialized( e, "name" ) );
		} );
	}

	@Before
	public void prepareTestData() {
		inTransaction( session -> {
			AnEntity anEntity = new AnEntity();
			anEntity.id = 1;
			anEntity.name = "George";
			session.persist( anEntity );
		} );
	}

	@Entity(name = "AnEntity")
	public static class AnEntity {
		@Id
		private int id;

		@Basic
		private String name;
	}

}

@@ -36,6 +36,7 @@ import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.CoreMatchers.sameInstance;
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hibernate.Hibernate.isInitialized;
 import static org.hibernate.Hibernate.isPropertyInitialized;
 import static org.hibernate.testing.bytecode.enhancement.EnhancerTestUtils.checkDirtyTracking;
 import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate;
@@ -90,7 +91,6 @@ public class LazyCollectionLoadingTest extends BaseCoreFunctionalTestCase {
 			Parent parent = s.load( Parent.class, parentID );
 			assertThat( parent, notNullValue() );
 			assertThat( parent, not( instanceOf( HibernateProxy.class ) ) );
-			assertThat( parent, not( instanceOf( HibernateProxy.class ) ) );
 			assertFalse( isPropertyInitialized( parent, "children" ) );
 			checkDirtyTracking( parent );
 
@@ -101,7 +101,39 @@ public class LazyCollectionLoadingTest extends BaseCoreFunctionalTestCase {
 			checkDirtyTracking( parent );
 
 			assertThat( children1, sameInstance( children2 ) );
 
+			assertFalse( isInitialized( children1 ) );
 			assertThat( children1.size(), equalTo( CHILDREN_SIZE ) );
+			assertTrue( isInitialized( children1 ) );
+		} );
+	}
+
+	@Test
+	@TestForIssue( jiraKey = "HHH-14620" )
+	public void testTransaction_noProxy() {
+		doInHibernate( this::sessionFactory, s -> {
+			// find will not return a proxy, which is exactly what we want here.
+			Parent parent = s.find( Parent.class, parentID );
+			assertThat( parent, notNullValue() );
+			assertThat( parent, not( instanceOf( HibernateProxy.class ) ) );
+			assertFalse( isPropertyInitialized( parent, "children" ) );
+			checkDirtyTracking( parent );
+
+			List<Child> children1 = parent.children;
+			List<Child> children2 = parent.children;
+
+			assertTrue( isPropertyInitialized( parent, "children" ) );
+			checkDirtyTracking( parent );
+
+			assertThat( children1, sameInstance( children2 ) );
+
+			// This check is important: a bug used to cause the collection to be initialized
+			// during the call to parent.children above.
+			// Note the same problem would occur if we were using getters:
+			// we only need extended enhancement to be enabled.
+			assertFalse( isInitialized( children1 ) );
+			assertThat( children1.size(), equalTo( CHILDREN_SIZE ) );
+			assertTrue( isInitialized( children1 ) );
 		} );
 	}
 
@@ -111,7 +143,6 @@ public class LazyCollectionLoadingTest extends BaseCoreFunctionalTestCase {
 			parent = s.load( Parent.class, parentID );
 			assertThat( parent, notNullValue() );
 			assertThat( parent, not( instanceOf( HibernateProxy.class ) ) );
-			assertThat( parent, not( instanceOf( HibernateProxy.class ) ) );
 			assertFalse( isPropertyInitialized( parent, "children" ) );
 		} );
 
@@ -122,7 +153,10 @@ public class LazyCollectionLoadingTest extends BaseCoreFunctionalTestCase {
 
 			checkDirtyTracking( parent );
 			assertThat( children1, sameInstance( children2 ) );
 
+			assertFalse( isInitialized( children1 ) );
 			assertThat( children1.size(), equalTo( CHILDREN_SIZE ) );
+			assertTrue( isInitialized( children1 ) );
 		}
 
 		// --- //

@@ -0,0 +1,166 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy;

import java.util.HashSet;
import java.util.Set;

import org.hibernate.HibernateException;
import org.hibernate.bytecode.enhance.spi.UnloadedClass;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType;
import org.hibernate.event.spi.LoadEvent;
import org.hibernate.event.spi.LoadEventListener;

import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.bytecode.enhancement.CustomEnhancementContext;
import org.hibernate.testing.bytecode.enhancement.EnhancerTestContext;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.Version;

import static org.hibernate.testing.transaction.TransactionUtil.doInJPA;

/**
 * @author Christian Beikov
 */
@TestForIssue( jiraKey = "HHH-14619" )
@RunWith( BytecodeEnhancerRunner.class )
public class LazyProxyWithCollectionTest extends BaseCoreFunctionalTestCase {

	private Long childId;

	@Override
	public Class<?>[] getAnnotatedClasses() {
		return new Class<?>[]{Parent.class, Child.class};
	}

	@Before
	public void prepare() {
		doInJPA( this::sessionFactory, em -> {
			Child c = new Child();
			em.persist( c );
			childId = c.getId();
		} );
	}

	@Test
	public void testReference() {
		doInJPA( this::sessionFactory, em -> {
			Child child = em.getReference( Child.class, childId );
			Parent parent = new Parent();
			parent.child = child;
			em.persist( parent );
			// Class cast exception occurs during auto-flush
			em.find( Parent.class, parent.getId() );
		} );
	}

	@Test
	public void testLazyCollection() {
		doInJPA( this::sessionFactory, em -> {
			Child child = em.find( Child.class, childId );
			Parent parent = new Parent();
			parent.child = child;
			em.persist( parent );
			child.children = new HashSet<>();
			// Class cast exception occurs during auto-flush
			em.find( Parent.class, parent.getId() );
		} );
	}

	// --- //

	@Entity
	@Table( name = "PARENT" )
	private static class Parent {

		@Id
		@GeneratedValue( strategy = GenerationType.AUTO )
		Long id;

		@OneToOne( fetch = FetchType.LAZY )
		Child child;

		public Long getId() {
			return id;
		}

		public Child getChild() {
			return child;
		}

		public void setChild(Child child) {
			this.child = child;
		}
	}

	@Entity
	@Table( name = "CHILD" )
	private static class Child {

		@Id
		@GeneratedValue( strategy = GenerationType.AUTO )
		Long id;
		@Version
		Long version;

		String name;

		@OneToMany
		Set<Child> children = new HashSet<>();

		Child() {
			// No-arg constructor necessary for proxy factory
		}

		public Long getId() {
			return id;
		}

		public void setId(Long id) {
			this.id = id;
		}

		public Long getVersion() {
			return version;
		}

		public void setVersion(Long version) {
			this.version = version;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}

		public Set<Child> getChildren() {
			return children;
		}

		public void setChildren(Set<Child> children) {
			this.children = children;
		}
	}

}

@@ -117,16 +117,17 @@ public class OnDemandLoadTest extends BaseCoreFunctionalTestCase {
 			store.getInventories().size();
 			assertTrue( isPropertyInitialized( store, "inventories" ) );
 
-			// the extra Session is the temp Session needed to perform the init
-			assertEquals( 2, sessionFactory().getStatistics().getSessionOpenCount() );
-			assertEquals( 1, sessionFactory().getStatistics().getSessionCloseCount() );
+			// the extra Sessions are the temp Sessions needed to perform the init:
+			// first the entity, then the collection (since it's lazy)
+			assertEquals( 3, sessionFactory().getStatistics().getSessionOpenCount() );
+			assertEquals( 2, sessionFactory().getStatistics().getSessionCloseCount() );
 
 			// clear Session again. The collection should still be recognized as initialized from above
 			s.clear();
 			assertNotNull( store );
 			assertTrue( isPropertyInitialized( store, "inventories" ) );
-			assertEquals( 2, sessionFactory().getStatistics().getSessionOpenCount() );
-			assertEquals( 1, sessionFactory().getStatistics().getSessionCloseCount() );
+			assertEquals( 3, sessionFactory().getStatistics().getSessionOpenCount() );
+			assertEquals( 2, sessionFactory().getStatistics().getSessionCloseCount() );
 
 			// lets clear the Session again and this time reload the Store
 			s.clear();
@@ -136,21 +137,22 @@ public class OnDemandLoadTest extends BaseCoreFunctionalTestCase {
 
 			// collection should be back to uninitialized since we have a new entity instance
 			assertFalse( isPropertyInitialized( store, "inventories" ) );
-			assertEquals( 2, sessionFactory().getStatistics().getSessionOpenCount() );
-			assertEquals( 1, sessionFactory().getStatistics().getSessionCloseCount() );
+			assertEquals( 3, sessionFactory().getStatistics().getSessionOpenCount() );
+			assertEquals( 2, sessionFactory().getStatistics().getSessionCloseCount() );
 			store.getInventories().size();
 			assertTrue( isPropertyInitialized( store, "inventories" ) );
 
-			// the extra Session is the temp Session needed to perform the init
-			assertEquals( 3, sessionFactory().getStatistics().getSessionOpenCount() );
-			assertEquals( 2, sessionFactory().getStatistics().getSessionCloseCount() );
+			// the extra Sessions are the temp Sessions needed to perform the init:
+			// first the entity, then the collection (since it's lazy)
+			assertEquals( 5, sessionFactory().getStatistics().getSessionOpenCount() );
+			assertEquals( 4, sessionFactory().getStatistics().getSessionCloseCount() );
 
 			// clear Session again. The collection should still be recognized as initialized from above
 			s.clear();
 			assertNotNull( store );
 			assertTrue( isPropertyInitialized( store, "inventories" ) );
-			assertEquals( 3, sessionFactory().getStatistics().getSessionOpenCount() );
-			assertEquals( 2, sessionFactory().getStatistics().getSessionCloseCount() );
+			assertEquals( 5, sessionFactory().getStatistics().getSessionOpenCount() );
+			assertEquals( 4, sessionFactory().getStatistics().getSessionCloseCount() );
 		} );
 	}
 

@@ -0,0 +1,111 @@
package org.hibernate.test.hql;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.MapKeyJoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import static org.hamcrest.core.Is.is;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertThat;

/**
 * @author Burkhard Graves
 */
@TestForIssue(jiraKey = "HHH-14475")
public class IndicesTest extends BaseNonConfigCoreFunctionalTestCase {

	@Override
	protected Class[] getAnnotatedClasses() {
		return new Class[] {Project.class, Role.class, Person.class};
	}

	@Before
	public void setUp() {
		doInHibernate( this::sessionFactory, session -> {

			Project project = new Project(1);
			Role role = new Role(1);

			session.save( project );
			session.save( role );

			Person person = new Person(1, project, role);

			session.save( person );
		});
	}

	@Test
	public void testSelectIndices() {
		doInHibernate( this::sessionFactory, session -> {

			List<Object> result = session.createQuery(
					"select indices(p.roles) from Person p"
			).list();

			assertThat( result.size(), is( 1 ) );
		});
	}

	@Entity(name = "Person")
	public static class Person {

		@Id
		private Integer id;

		@OneToMany
		@JoinTable(name = "person_to_role",
				joinColumns = @JoinColumn(name = "person_id"),
				inverseJoinColumns = @JoinColumn(name = "role_id")
		)
		@MapKeyJoinColumn(name = "project_id")
		private Map<Project, Role> roles;

		public Person() {
		}

		public Person(Integer id, Project project, Role role) {
			this.id = id;
			roles = new HashMap<>();
			roles.put(project, role);
		}
	}

	@Entity(name = "Project")
	public static class Project {

		@Id
		private Integer id;

		public Project() {
		}

		public Project(Integer id) {
			this.id = id;
		}
	}

	@Entity(name = "Role")
	public static class Role {

		@Id
		private Integer id;

		public Role() {
		}

		public Role(Integer id) {
			this.id = id;
		}
	}
}

@@ -0,0 +1,187 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */

package org.hibernate.test.hql.size;

import static org.hibernate.testing.transaction.TransactionUtil.doInJPA;
import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.TypedQuery;

import org.hibernate.annotations.ResultCheckStyle;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Where;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.dialect.PostgreSQL82Dialect;
import org.hibernate.jpa.test.BaseEntityManagerFunctionalTestCase;

import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue;
import org.junit.Test;

@TestForIssue(jiraKey = "HHH-14585")
@RequiresDialect(value = PostgreSQL82Dialect.class, comment = "Other databases may not support boolean data types")
@RequiresDialect(value = H2Dialect.class, comment = "Other databases may not support boolean data types")
public class WhereClauseOrderBySizeTest extends BaseEntityManagerFunctionalTestCase {

	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class<?>[] { Person.class, Book.class };
	}

	@Test
	public void testSizeAsOrderByExpression() {
		doInJPA(
				this::entityManagerFactory,
				entityManager -> {
					// initial situation: Alice has 1 book, Bob none
					final Person alice = new Person( "Alice" );
					entityManager.persist( alice );

					final Book book1 = new Book();
					book1.setOwner( alice );
					entityManager.persist( book1 );

					final Person bob = new Person( "Bob" );
					entityManager.persist( bob );

					final TypedQuery<Person> orderByBroken = entityManager.createQuery(
							"SELECT p FROM Person p ORDER BY size(p.books) DESC",
							Person.class
					);
					final TypedQuery<Person> orderByWorking = entityManager.createQuery(
							"SELECT p FROM Person p ORDER BY p.books.size DESC",
							Person.class
					);

					List<Person> dbPeopleBroken = orderByBroken.getResultList();
					List<Person> dbPeopleWorking = orderByWorking.getResultList();
					assertEquals( Arrays.asList( alice, bob ), dbPeopleWorking );
					assertEquals( dbPeopleWorking, dbPeopleBroken );

					// add 2 books to Bob
					final Book book2 = new Book();
					book2.setOwner( bob );
					entityManager.persist( book2 );

					final Book book3 = new Book();
					book3.setOwner( bob );
					entityManager.persist( book3 );

					dbPeopleBroken = orderByBroken.getResultList();
					dbPeopleWorking = orderByWorking.getResultList();
					assertEquals( Arrays.asList( bob, alice ), dbPeopleWorking );
					assertEquals( dbPeopleWorking, dbPeopleBroken );

					// remove (soft-deleting) both Bob's books
					entityManager.remove( book2 );
					entityManager.remove( book3 );

					// result lists are not equal anymore
					dbPeopleBroken = orderByBroken.getResultList();
					dbPeopleWorking = orderByWorking.getResultList();
					assertEquals( Arrays.asList( alice, bob ), dbPeopleWorking );
					assertEquals( dbPeopleWorking, dbPeopleBroken );
				}
		);
	}

	@Entity(name = "Person")
	public static class Person {
		@Id
		@GeneratedValue
		private Long id;
		private String name;
		@OneToMany(mappedBy = "owner", fetch = FetchType.LAZY, cascade = CascadeType.REMOVE)
		private List<Book> books = new ArrayList<>();

		public Person() {
		}

		public Person(String name) {
			this.name = name;
		}

		public Long getId() {
			return id;
		}

		public void setId(Long id) {
			this.id = id;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}

		public List<Book> getBooks() {
			return books;
		}

		public void setBooks(List<Book> books) {
			this.books = books;
		}

		@Override
		public String toString() {
			return "Person{" +
					"id=" + id +
					", name='" + name + '\'' +
					'}';
		}
	}

	@Entity(name = "Book")
	@SQLDelete(sql = "UPDATE Book SET deleted = true WHERE id = ?", check = ResultCheckStyle.COUNT)
	@Where(clause = "deleted = false")
	public static class Book {
		@Id
		@GeneratedValue
		private Long id;
		private Boolean deleted = false;
		@ManyToOne
		private Person owner;

		public Long getId() {
			return id;
		}

		public void setId(Long id) {
			this.id = id;
		}

		public Boolean getDeleted() {
			return deleted;
		}

		public void setDeleted(Boolean deleted) {
			this.deleted = deleted;
		}

		public Person getOwner() {
			return owner;
		}

		public void setOwner(Person owner) {
			this.owner = owner;
		}
	}
}

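A minimal usage sketch of what the test above exercises, assuming the Person/Book mappings from this file and an open EntityManager named em (the method and variable names here are illustrative assumptions):

	// Sketch only: the two HQL spellings the test asserts to be equivalent; because Book
	// carries @Where(clause = "deleted = false"), soft-deleted books should not count.
	static List<Person> peopleByBookCountDesc(EntityManager em) {
		// equivalently: "SELECT p FROM Person p ORDER BY p.books.size DESC"
		return em.createQuery(
				"SELECT p FROM Person p ORDER BY size(p.books) DESC", Person.class )
				.getResultList();
	}
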
@@ -0,0 +1,60 @@
/*
 * Hibernate Search, full-text search for your domain model
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.hql.size.filter;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;

import org.hibernate.annotations.Where;

@Entity
@Where(clause = "deleted = false")
public class City {

	@Id
	private Integer id;

	private String name;

	@ManyToOne
	private Region region;

	private Boolean deleted;

	public Integer getId() {
		return id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public Region getRegion() {
		return region;
	}

	public void setRegion(Region region) {
		this.region = region;
	}

	public Boolean getDeleted() {
		return deleted;
	}

	public void setDeleted(Boolean deleted) {
		this.deleted = deleted;
	}
}

@@ -0,0 +1,49 @@
/*
 * Hibernate Search, full-text search for your domain model
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.hql.size.filter;

import java.util.ArrayList;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.OneToMany;

@Entity
public class Region {

	@Id
	private Integer id;

	private String name;

	@OneToMany(mappedBy = "region")
	private List<City> cities = new ArrayList<>();

	public Integer getId() {
		return id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public List<City> getCities() {
		return cities;
	}

	public void setCities(List<City> cities) {
		this.cities = cities;
	}
}

@@ -0,0 +1,136 @@
/*
 * Hibernate Search, full-text search for your domain model
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.hql.size.filter;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.List;

import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.query.spi.QueryImplementor;

import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

@TestForIssue(jiraKey = "HHH-14585")
public class WhereAnnotatedOneToManySizeTest extends BaseCoreFunctionalTestCase {

	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class<?>[] { Region.class, City.class };
	}

	@Before
	public void before() {
		Region lazio = new Region();
		lazio.setId( 1 );
		lazio.setName( "Lazio" );

		Region lombardy = new Region();
		lombardy.setId( 2 );
		lombardy.setName( "Lombardy" );

		City rome = new City();
		rome.setId( 1 );
		rome.setName( "Rome" );
		rome.setDeleted( false );
		rome.setRegion( lazio );

		City gradoli = new City();
		gradoli.setId( 2 );
		gradoli.setName( "Gradoli" );
		gradoli.setDeleted( true );
		gradoli.setRegion( lazio );

		City milan = new City();
		milan.setId( 3 );
		milan.setName( "Milan" );
		milan.setDeleted( false );
		milan.setRegion( lombardy );

		City pavia = new City();
		pavia.setId( 4 );
		pavia.setName( "Pavia" );
		pavia.setDeleted( false );
		pavia.setRegion( lombardy );

		lazio.getCities().add( rome );
		lazio.getCities().add( gradoli );

		lombardy.getCities().add( milan );
		lombardy.getCities().add( pavia );

		inTransaction( session -> {
			session.persist( lazio );
			session.persist( lombardy );

			session.persist( rome );
			session.persist( gradoli );
			session.persist( milan );
			session.persist( pavia );
		} );
	}

	@After
	public void after() {
		inTransaction( session -> {
			session.createQuery( "DELETE FROM City c" ).executeUpdate();
			session.createQuery( "DELETE FROM Region c" ).executeUpdate();
		} );
	}

	@Test
	@SkipForDialect(value = DB2Dialect.class, comment = "DB2 does not support correlated subqueries in the ORDER BY clause")
	@SkipForDialect(value = AbstractHANADialect.class, comment = "HANA db does not support correlated subqueries in the ORDER BY clause")
	public void orderBy_sizeOf() {
		inSession( session -> {
			QueryImplementor<Object[]> query = session.createQuery(
					"select r, size(r.cities) from Region r order by size(r.cities) desc" );
			List<Object[]> result = query.getResultList();
			assertThat( result ).extracting( f -> f[0] ).extracting( "name" ).containsExactly( "Lombardy", "Lazio" );
			assertThat( result ).extracting( f -> f[1] ).containsExactly( 2, 1 );
		} );
	}

	@Test
	@SkipForDialect(value = DB2Dialect.class, comment = "DB2 does not support correlated subqueries in the ORDER BY clause")
	@SkipForDialect(value = AbstractHANADialect.class, comment = "HANA db does not support correlated subqueries in the ORDER BY clause")
	public void orderBy_dotSize() {
		inSession( session -> {
			QueryImplementor<Object[]> query = session.createQuery(
					"select r, r.cities.size from Region r order by r.cities.size desc" );
			List<Object[]> result = query.getResultList();
			assertThat( result ).extracting( f -> f[0] ).extracting( "name" ).containsExactly( "Lombardy", "Lazio" );
			assertThat( result ).extracting( f -> f[1] ).containsExactly( 2, 1 );
		} );
	}

	@Test
	public void project_sizeOf() {
		inSession( session -> {
			QueryImplementor<Integer> query = session.createQuery(
					"SELECT size(r.cities) FROM Region r", Integer.class );
			List<Integer> cityCounts = query.getResultList();
			assertThat( cityCounts ).containsExactlyInAnyOrder( 1, 2 );
		} );
	}

	@Test
	public void project_dotSize() {
		inSession( session -> {
			QueryImplementor<Integer> query = session.createQuery(
					"SELECT r.cities.size FROM Region r", Integer.class );
			List<Integer> cityCounts = query.getResultList();
			assertThat( cityCounts ).containsExactlyInAnyOrder( 1, 2 );
		} );
	}
}

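A minimal sketch of the expectation these four tests encode, assuming the Region/City mappings above and an open Hibernate Session named session (the variable name is an illustrative assumption):

	// Sketch: City is mapped with @Where(clause = "deleted = false"), so the soft-deleted
	// Gradoli should not count; the tests expect Lazio to report 1 city and Lombardy 2.
	List<Object[]> regionsWithCityCounts = session.createQuery(
			"select r, size(r.cities) from Region r order by size(r.cities) desc", Object[].class )
			.getResultList();
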
@@ -0,0 +1,112 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.inheritance.discriminator;

import java.sql.Statement;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;

import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Test;

import static org.assertj.core.api.Assertions.assertThat;

@TestForIssue(jiraKey = "HHH-12445")
public class SingleTableNullNotNullDiscriminatorTest extends BaseCoreFunctionalTestCase {

	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class<?>[] {
				RootEntity.class,
				Val1Entity.class,
				Val2Entity.class,
				NotNullEntity.class
		};
	}

	@Test
	public void test() {
		inTransaction( session -> {
			Val1Entity val1 = new Val1Entity();
			val1.setId( 1L );

			Val2Entity val2 = new Val2Entity();
			val2.setId( 2L );

			RootEntity root = new RootEntity();
			root.setId( 3L );

			session.persist( val1 );
			session.persist( val2 );
			session.persist( root );

			session.doWork( connection -> {
				try (Statement statement = connection.createStatement()) {
					statement.executeUpdate(
							"insert into root_ent (DTYPE, id) " +
									"values ('other', 4)"
					);
				}
			} );
		} );

		inTransaction( session -> {
			Map<Long, RootEntity> entities = session.createQuery(
					"select e from root_ent e", RootEntity.class )
					.getResultList()
					.stream()
					.collect( Collectors.toMap( RootEntity::getId, Function.identity() ) );

			assertThat( entities ).extractingByKey( 1L ).isInstanceOf( Val1Entity.class );
			assertThat( entities ).extractingByKey( 2L ).isInstanceOf( Val2Entity.class );
			assertThat( entities ).extractingByKey( 3L ).isInstanceOf( RootEntity.class );
			assertThat( entities ).extractingByKey( 4L ).isInstanceOf( NotNullEntity.class );
		} );
	}

	@Entity(name = "root_ent")
	@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
	@DiscriminatorValue("null")
	public static class RootEntity {

		@Id
		private Long id;

		public Long getId() {
			return id;
		}

		public void setId(Long id) {
			this.id = id;
		}
	}

	@Entity(name = "val1_ent")
	@DiscriminatorValue("val1")
	public static class Val1Entity extends RootEntity {

	}

	@Entity(name = "val2_ent")
	@DiscriminatorValue("val2")
	public static class Val2Entity extends RootEntity {

	}

	@Entity(name = "notnull_ent")
	@DiscriminatorValue("not null")
	public static class NotNullEntity extends RootEntity {

	}
}

@@ -0,0 +1,114 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.inheritance.discriminator.joinedsubclass;

import java.sql.Statement;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.persistence.DiscriminatorColumn;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;

import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Test;

import static org.assertj.core.api.Assertions.assertThat;

@TestForIssue(jiraKey = "HHH-12445")
public class JoinedNullNotNullDiscriminatorTest extends BaseCoreFunctionalTestCase {

	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class<?>[] {
				RootEntity.class,
				Val1Entity.class,
				Val2Entity.class,
				NotNullEntity.class
		};
	}

	@Test
	public void test() {
		inTransaction( session -> {
			Val1Entity val1 = new Val1Entity();
			val1.setId( 1L );

			Val2Entity val2 = new Val2Entity();
			val2.setId( 2L );

			RootEntity root = new RootEntity();
			root.setId( 3L );

			session.persist( val1 );
			session.persist( val2 );
			session.persist( root );

			session.doWork( connection -> {
				try (Statement statement = connection.createStatement()) {
					statement.executeUpdate(
							"insert into root_ent (DTYPE, id) " +
									"values ('other', 4)"
					);
				}
			} );
		} );

		inTransaction( session -> {
			Map<Long, RootEntity> entities = session.createQuery(
					"select e from root_ent e", RootEntity.class )
					.getResultList()
					.stream()
					.collect( Collectors.toMap( RootEntity::getId, Function.identity() ) );

			assertThat( entities ).extractingByKey( 1L ).isInstanceOf( Val1Entity.class );
			assertThat( entities ).extractingByKey( 2L ).isInstanceOf( Val2Entity.class );
			assertThat( entities ).extractingByKey( 3L ).isInstanceOf( RootEntity.class );
			assertThat( entities ).extractingByKey( 4L ).isInstanceOf( NotNullEntity.class );
		} );
	}

	@Entity(name = "root_ent")
	@Inheritance(strategy = InheritanceType.JOINED)
	@DiscriminatorColumn()
	@DiscriminatorValue("null")
	public static class RootEntity {

		@Id
		private Long id;

		public Long getId() {
			return id;
		}

		public void setId(Long id) {
			this.id = id;
		}
	}

	@Entity(name = "val1_ent")
	@DiscriminatorValue("val1")
	public static class Val1Entity extends RootEntity {

	}

	@Entity(name = "val2_ent")
	@DiscriminatorValue("val2")
	public static class Val2Entity extends RootEntity {

	}

	@Entity(name = "notnull_ent")
	@DiscriminatorValue("not null")
	public static class NotNullEntity extends RootEntity {

	}
}

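Both discriminator tests rely on the same mapping convention: @DiscriminatorValue("null") matches rows whose discriminator column is NULL, while @DiscriminatorValue("not null") is the catch-all for discriminator values with no explicitly mapped subclass. A minimal sketch of that expectation, assuming an open Session named session (an illustrative assumption):

	// Sketch: the row inserted via plain JDBC with DTYPE = 'other' (id 4) matches no mapped
	// subclass, so the tests expect it to load as the "not null" catch-all type.
	RootEntity unmapped = session.createQuery( "select e from root_ent e where e.id = 4", RootEntity.class )
			.getSingleResult();
	// per the assertions above, unmapped is an instance of NotNullEntity
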
@@ -0,0 +1,92 @@
package org.hibernate.test.optlock;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.LockModeType;
import javax.persistence.Version;

import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;

import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class OptimisticLockWithGloballyQuotedIdentifierTest extends BaseCoreFunctionalTestCase {

	@Override
	protected void configure(Configuration configuration) {
		configuration.setProperty( AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, "true" );
	}

	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class[] { Person.class };
	}

	@Before
	public void setUp() {
		inTransaction(
				session -> {
					Person person = new Person( "1", "Fabiana" );
					session.persist( person );
				}
		);
	}

	@After
	public void tearDown() {
		inTransaction(
				session -> {
					session.createQuery( "delete from Person" ).executeUpdate();
				}
		);
	}

	@Test
	public void testHqlQueryWithOptimisticLock() {
		inTransaction(
				session -> {
					session.createQuery( "from Person e", Person.class )
							.setLockMode( LockModeType.OPTIMISTIC )
							.getResultList().get( 0 );
				}
		);
	}

	@Entity(name = "Person")
	public static class Person {
		@Id
		private String id;

		@Version
		private long version;

		private String name;

		public Person() {
		}

		public Person(String id, String name) {
			this.id = id;
			this.name = name;
		}

		public String getId() {
			return id;
		}

		public void setId(String id) {
			this.id = id;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}
	}
}

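The configure() override above is the test-harness way of enabling global identifier quoting; a minimal sketch of the same switch in a programmatic bootstrap, assuming the Person entity from this test (the bootstrap style here is an assumption, not taken from the patch):

	// Sketch: with GLOBALLY_QUOTED_IDENTIFIERS enabled, Hibernate quotes every table and
	// column identifier it generates, which is what the optimistic-lock query relies on here.
	Configuration cfg = new Configuration()
			.setProperty( AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, "true" )
			.addAnnotatedClass( Person.class );
	SessionFactory sessionFactory = cfg.buildSessionFactory();
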
@@ -0,0 +1,89 @@
package org.hibernate.test.optlock;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.LockModeType;
import javax.persistence.Version;

import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;

import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class OptimisticLockWithQuotedVersionTest extends BaseCoreFunctionalTestCase {

	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class[] { Person.class };
	}

	@Before
	public void setUp() {
		inTransaction(
				session -> {
					Person person = new Person( "1", "Fabiana" );
					session.persist( person );
				}
		);
	}

	@After
	public void tearDown() {
		inTransaction(
				session -> {
					session.createQuery( "delete from Person" ).executeUpdate();
				}
		);
	}

	@Test
	public void testHqlQueryWithOptimisticLock() {
		inTransaction(
				session -> {
					session.createQuery( "from Person e", Person.class )
							.setLockMode( LockModeType.OPTIMISTIC )
							.getResultList().get( 0 );
				}
		);
	}

	@Entity(name = "Person")
	public static class Person {
		@Id
		private String id;

		@Version
		@Column(name = "`version`")
		private long version;

		private String name;

		public Person() {
		}

		public Person(String id, String name) {
			this.id = id;
			this.name = name;
		}

		public String getId() {
			return id;
		}

		public void setId(String id) {
			this.id = id;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}
	}
}

@@ -0,0 +1,88 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.internal;

import java.util.ArrayList;
import java.util.Set;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.TruthValue;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.testing.TestForIssue;
import org.hibernate.tool.schema.extract.internal.ColumnInformationImpl;
import org.hibernate.tool.schema.extract.internal.ForeignKeyInformationImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.junit.Test;

import static java.util.Collections.singletonList;
import static org.hamcrest.core.Is.is;
import static org.hibernate.boot.model.naming.Identifier.toIdentifier;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

/**
 * @author Emmanuel Duchastenier
 */
public class AbstractSchemaMigratorTest {

	@Test
	@TestForIssue(jiraKey = "HHH-13779")
	public void testForeignKeyPreExistenceDetectionIgnoresCaseForTableAndColumnName() {
		final AbstractSchemaMigrator schemaMigrator = new AbstractSchemaMigrator(null, null) {
			@Override
			protected NameSpaceTablesInformation performTablesMigration(
					Metadata metadata,
					DatabaseInformation existingDatabase,
					ExecutionOptions options,
					ContributableMatcher contributableInclusionFilter,
					Dialect dialect,
					Formatter formatter,
					Set<String> exportIdentifiers,
					boolean tryToCreateCatalogs,
					boolean tryToCreateSchemas,
					Set<Identifier> exportedCatalogs,
					Namespace namespace,
					GenerationTarget[] targets) {
				return null;
			}
		};

		final TableInformation existingTableInformation = mock(TableInformation.class);
		final ArrayList<ForeignKeyInformation.ColumnReferenceMapping> columnReferenceMappings = new ArrayList<>();

		final TableInformation destinationTableInformation = mock(TableInformation.class);
		doReturn(new QualifiedTableName(toIdentifier("catalog"), toIdentifier("schema"),
				toIdentifier("referenced_table"))) // Table name is lower case
				.when(destinationTableInformation).getName();
		columnReferenceMappings.add(new ForeignKeyInformationImpl.ColumnReferenceMappingImpl(
				new ColumnInformationImpl(null, toIdentifier("referencing_column"), // column name is lower case
						0, "typeName", 255, 0, TruthValue.TRUE),
				new ColumnInformationImpl(destinationTableInformation, null, 1, "typeName", 0, 0, TruthValue.TRUE)));
		doReturn(singletonList(new ForeignKeyInformationImpl(toIdentifier("FKp8mpamfw2inhj88hwhty1eipm"), columnReferenceMappings)))
				.when(existingTableInformation).getForeignKeys();

		final boolean existInDatabase = schemaMigrator.equivalentForeignKeyExistsInDatabase(
				existingTableInformation,
				"REFERENCING_COLUMN", "REFERENCED_TABLE"); // Table and column names are UPPER-case here, to prove the test

		assertThat("Expected ForeignKey pre-existence check to be case-insensitive",
				existInDatabase,
				is(true));
	}

}

@@ -0,0 +1 @@
INSERT INTO Item(name) VALUES('multi-file-test');