HHH-15626 Move old databases to nightly pipeline and add some SQL Server 2022 features

Christian Beikov 2022-11-03 15:15:01 +01:00
parent dd5c455d88
commit 4914d14a43
95 changed files with 1732 additions and 761 deletions


@ -39,10 +39,8 @@ jobs:
- rdbms: hsqldb
- rdbms: derby
- rdbms: mysql
- rdbms: mysql8
- rdbms: mariadb
- rdbms: postgresql
- rdbms: postgresql_14
- rdbms: edb
- rdbms: oracle
- rdbms: db2

Jenkinsfile

@ -26,14 +26,24 @@ this.helper = new JobHelper(this)
helper.runWithNotification {
stage('Configure') {
this.environments = [
// TODO: this block is just temporary for testing, but should ultimately be removed because this is tested nightly
new BuildEnvironment( dbName: 'h2_1_4' ),
new BuildEnvironment( dbName: 'hsqldb_2_6' ),
new BuildEnvironment( dbName: 'derby_10_14' ),
new BuildEnvironment( dbName: 'mysql_5_7' ),
new BuildEnvironment( dbName: 'mariadb_10_3' ),
new BuildEnvironment( dbName: 'postgresql_10' ),
new BuildEnvironment( dbName: 'edb_10' ),
new BuildEnvironment( dbName: 'oracle_11_2' ),
new BuildEnvironment( dbName: 'db2_10_5', longRunning: true ),
new BuildEnvironment( dbName: 'mssql_2017' ),
// new BuildEnvironment( dbName: 'h2' ),
// new BuildEnvironment( dbName: 'hsqldb' ),
// new BuildEnvironment( dbName: 'derby' ),
// new BuildEnvironment( dbName: 'mysql' ),
// new BuildEnvironment( dbName: 'mysql8' ),
// new BuildEnvironment( dbName: 'mariadb' ),
// new BuildEnvironment( dbName: 'postgresql' ),
// new BuildEnvironment( dbName: 'postgresql_14' ),
// new BuildEnvironment( dbName: 'edb' ),
// new BuildEnvironment( dbName: 'oracle' ),
// new BuildEnvironment( dbName: 'db2' ),
@ -126,55 +136,84 @@ stage('Build') {
try {
stage('Start database') {
switch (buildEnv.dbName) {
case "cockroachdb":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('cockroachdb/cockroach:v21.1.21').pull()
}
sh "./docker_db.sh cockroachdb"
state[buildEnv.tag]['containerName'] = "cockroach"
case "h2_1_4":
state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
" -Pgradle.libs.versions.h2=1.4.197 -Pgradle.libs.versions.h2gis=1.5.0"
break;
case "hsqldb_2_6":
state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
" -Pgradle.libs.versions.hsqldb=2.6.1"
break;
case "derby_10_14":
state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
" -Pgradle.libs.versions.derby=10.14.2.0"
break;
case "mysql":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mysql:5.7').pull()
docker.image('mysql:8.0.31').pull()
}
sh "./docker_db.sh mysql"
state[buildEnv.tag]['containerName'] = "mysql"
break;
case "mysql8":
case "mysql_5_7":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mysql:8.0.21').pull()
docker.image('mysql:5.7.40').pull()
}
sh "./docker_db.sh mysql_8_0"
sh "./docker_db.sh mysql_5_7"
state[buildEnv.tag]['containerName'] = "mysql"
break;
case "mariadb":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mariadb:10.7.5').pull()
docker.image('mariadb:10.9.3').pull()
}
sh "./docker_db.sh mariadb"
state[buildEnv.tag]['containerName'] = "mariadb"
break;
case "mariadb_10_3":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mariadb:10.3.36').pull()
}
sh "./docker_db.sh mariadb_10_3"
state[buildEnv.tag]['containerName'] = "mariadb"
break;
case "postgresql":
// use the postgis image to enable the PGSQL GIS (spatial) extension
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('postgis/postgis:9.5-2.5').pull()
docker.image('postgis/postgis:14-3.3').pull()
}
sh "./docker_db.sh postgresql"
state[buildEnv.tag]['containerName'] = "postgres"
break;
case "postgresql_14":
case "postgresql_10":
// use the postgis image to enable the PGSQL GIS (spatial) extension
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('postgis/postgis:14-3.3').pull()
docker.image('postgis/postgis:10-2.5').pull()
}
sh "./docker_db.sh postgresql_14"
sh "./docker_db.sh postgresql_10"
state[buildEnv.tag]['containerName'] = "postgres"
break;
case "edb":
docker.image('quay.io/enterprisedb/edb-postgres-advanced:14.5-3.2-postgis').pull()
sh "./docker_db.sh edb"
state[buildEnv.tag]['containerName'] = "edb"
break;
case "edb_10":
docker.image('quay.io/enterprisedb/edb-postgres-advanced:10.22').pull()
sh "./docker_db.sh edb_10"
state[buildEnv.tag]['containerName'] = "edb"
break;
case "oracle":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('gvenzl/oracle-xe:18.4.0-full').pull()
docker.image('gvenzl/oracle-xe:21.3.0-full').pull()
}
sh "./docker_db.sh oracle_18"
sh "./docker_db.sh oracle"
state[buildEnv.tag]['containerName'] = "oracle"
break;
case "oracle_11_2":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('gvenzl/oracle-xe:11.2.0.2-full').pull()
}
sh "./docker_db.sh oracle_11"
state[buildEnv.tag]['containerName'] = "oracle"
break;
case "db2":
@ -184,11 +223,23 @@ stage('Build') {
sh "./docker_db.sh db2"
state[buildEnv.tag]['containerName'] = "db2"
break;
case "db2_10_5":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('ibmoms/db2express-c@sha256:a499afd9709a1f69fb41703e88def9869955234c3525547e2efc3418d1f4ca2b').pull()
}
sh "./docker_db.sh db2_10_5"
state[buildEnv.tag]['containerName'] = "db2"
break;
case "mssql":
docker.image('mcr.microsoft.com/mssql/server@sha256:f54a84b8a802afdfa91a954e8ddfcec9973447ce8efec519adf593b54d49bedf').pull()
sh "./docker_db.sh mssql"
state[buildEnv.tag]['containerName'] = "mssql"
break;
case "mssql_2017":
docker.image('mcr.microsoft.com/mssql/server@sha256:7d194c54e34cb63bca083542369485c8f4141596805611e84d8c8bab2339eede').pull()
sh "./docker_db.sh mssql_2017"
state[buildEnv.tag]['containerName'] = "mssql"
break;
case "sybase":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('nguoianphu/docker-sybase').pull()
@ -196,10 +247,19 @@ stage('Build') {
sh "./docker_db.sh sybase"
state[buildEnv.tag]['containerName'] = "sybase"
break;
case "edb":
docker.image('quay.io/enterprisedb/edb-postgres-advanced:10.22').pull()
sh "./docker_db.sh edb"
state[buildEnv.tag]['containerName'] = "edb"
case "cockroachdb":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('cockroachdb/cockroach:v22.1.10').pull()
}
sh "./docker_db.sh cockroachdb"
state[buildEnv.tag]['containerName'] = "cockroach"
break;
case "cockroachdb_21_2":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('cockroachdb/cockroach:v21.2.16').pull()
}
sh "./docker_db.sh cockroachdb_21_2"
state[buildEnv.tag]['containerName'] = "cockroach"
break;
}
}
@ -208,7 +268,7 @@ stage('Build') {
withEnv(["RDBMS=${buildEnv.dbName}"]) {
try {
if (buildEnv.dbLockableResource == null) {
timeout( [time: buildEnv.longRunning ? 240 : 120, unit: 'MINUTES'] ) {
timeout( [time: buildEnv.longRunning ? 480 : 120, unit: 'MINUTES'] ) {
sh cmd
}
}
@ -217,7 +277,7 @@ stage('Build') {
if ( buildEnv.dbLockResourceAsHost ) {
cmd += " -DdbHost=${LOCKED_RESOURCE}"
}
timeout( [time: buildEnv.longRunning ? 240 : 120, unit: 'MINUTES'] ) {
timeout( [time: buildEnv.longRunning ? 480 : 120, unit: 'MINUTES'] ) {
sh cmd
}
}


@ -161,26 +161,18 @@ The following table illustrates a list of commands for various databases that ca
|-
|`./gradlew test -Pdb=derby`
|MySQL 5.7
|MySQL
|`./docker_db.sh mysql`
|`./gradlew test -Pdb=mysql_ci`
|MySQL 8.0
|`./docker_db.sh mysql_8_0`
|`./gradlew test -Pdb=mysql_ci`
|MariaDB
|`./docker_db.sh mariadb`
|`./gradlew test -Pdb=mariadb_ci`
|PostgreSQL 10
|PostgreSQL
|`./docker_db.sh postgresql`
|`./gradlew test -Pdb=pgsql_ci`
|PostgreSQL 14
|`./docker_db.sh postgresql_14`
|`./gradlew test -Pdb=pgsql_ci`
|EnterpriseDB
|`./docker_db.sh edb`
|`./gradlew test -Pdb=edb_ci`
@ -189,22 +181,6 @@ The following table illustrates a list of commands for various databases that ca
|`./docker_db.sh oracle`
|`./gradlew test -Pdb=oracle_ci`
|Oracle 11g
|`./docker_db.sh oracle_11`
|`./gradlew test -Pdb=oracle_ci`
|Oracle XE 18
|`./docker_db.sh oracle_18`
|`./gradlew test -Pdb=oracle_ci`
|Oracle XE 21
|`./docker_db.sh oracle_21`
|`./gradlew test -Pdb=oracle_ci`
|Oracle EE
|`./docker_db.sh oracle_ee`
|`./gradlew test -Pdb=oracle_docker`
|DB2
|`./docker_db.sh db2`
|`./gradlew test -Pdb=db2_ci`
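
Each row above pairs a container start command with the matching Gradle profile; as a minimal local workflow (a sketch, assuming the repository root as the working directory, using the PostgreSQL row above):

    ./docker_db.sh postgresql        # start the PostgreSQL/PostGIS container
    ./gradlew test -Pdb=pgsql_ci     # run the test suite against it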


@ -1,44 +1,39 @@
#! /bin/bash
goal=
if [ "$RDBMS" == "h2" ]; then
if [ "$RDBMS" == "h2" ] || [ "$RDBMS" == "h2_1_4" ]; then
# This is the default.
goal=""
elif [ "$RDBMS" == "hsqldb" ] || [ "$RDBMS" == "hsqldb_2_6" ]; then
goal="-Pdb=hsqldb"
elif [ "$RDBMS" == "derby" ]; then
goal="-Pdb=derby"
elif [ "$RDBMS" == "edb" ]; then
goal="-Pdb=edb_ci -DdbHost=localhost:5444"
elif [ "$RDBMS" == "hsqldb" ]; then
goal="-Pdb=hsqldb"
elif [ "$RDBMS" == "mysql8" ]; then
elif [ "$RDBMS" == "derby_10_14" ]; then
goal="-Pdb=derby_old"
elif [ "$RDBMS" == "mysql" ] || [ "$RDBMS" == "mysql_5_7" ]; then
goal="-Pdb=mysql_ci"
elif [ "$RDBMS" == "mysql" ]; then
goal="-Pdb=mysql_ci"
elif [ "$RDBMS" == "mariadb" ]; then
elif [ "$RDBMS" == "mariadb" ] || [ "$RDBMS" == "mariadb_10_3" ]; then
goal="-Pdb=mariadb_ci"
elif [ "$RDBMS" == "postgresql" ]; then
elif [ "$RDBMS" == "postgresql" ] || [ "$RDBMS" == "postgresql_10" ]; then
goal="-Pdb=pgsql_ci"
elif [ "$RDBMS" == "postgresql_14" ]; then
goal="-Pdb=pgsql_ci"
elif [ "$RDBMS" == "oracle" ]; then
# I have no idea why, but these tests don't work on GH Actions
# yrodiere: Apparently those have been disabled on Jenkins as well...
elif [ "$RDBMS" == "edb" ] || [ "$RDBMS" == "edb_10" ]; then
goal="-Pdb=edb_ci -DdbHost=localhost:5444"
elif [ "$RDBMS" == "oracle" ] || [ "$RDBMS" == "oracle_11_2" ]; then
# I have no idea why, but these tests don't seem to work on CI...
goal="-Pdb=oracle_ci -PexcludeTests=**.LockTest.testQueryTimeout*"
elif [ "$RDBMS" == "oracle_ee" ]; then
goal="-Pdb=oracle_jenkins"
elif [ "$RDBMS" == "db2" ]; then
goal="-Pdb=db2_ci"
elif [ "$RDBMS" == "mssql" ]; then
elif [ "$RDBMS" == "db2_10_5" ]; then
goal="-Pdb=db2"
elif [ "$RDBMS" == "mssql" ] || [ "$RDBMS" == "mssql_2017" ]; then
goal="-Pdb=mssql_ci"
elif [ "$RDBMS" == "hana" ]; then
goal="-Pdb=hana_ci"
elif [ "$RDBMS" == "hana_cloud" ]; then
goal="-Pdb=hana_cloud"
elif [ "$RDBMS" == "sybase" ]; then
goal="-Pdb=sybase_ci"
elif [ "$RDBMS" == "tidb" ]; then
goal="-Pdb=tidb"
elif [ "$RDBMS" == "cockroachdb" ]; then
elif [ "$RDBMS" == "hana_cloud" ]; then
goal="-Pdb=hana_cloud"
elif [ "$RDBMS" == "cockroachdb" ] || [ "$RDBMS" == "cockroachdb_21_2" ]; then
goal="-Pdb=cockroachdb"
fi
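
The elif chain above only selects Gradle flags from the RDBMS environment variable; the ./gradlew call itself presumably sits further down in the script and is outside this hunk. A sketch of how a value flows through (hypothetical shell session; the test command shape follows the table above):

    RDBMS=db2_10_5          # the chain maps this to the legacy DB2 profile
    goal="-Pdb=db2"
    ./gradlew test $goal    # assumed downstream invocation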


@ -4,26 +4,22 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
if [ "$RDBMS" == 'mysql' ]; then
bash $DIR/../docker_db.sh mysql
elif [ "$RDBMS" == 'mysql8' ]; then
bash $DIR/../docker_db.sh mysql_8_0
elif [ "$RDBMS" == 'mariadb' ]; then
bash $DIR/../docker_db.sh mariadb
elif [ "$RDBMS" == 'postgresql' ]; then
bash $DIR/../docker_db.sh postgresql
elif [ "$RDBMS" == 'postgresql_14' ]; then
bash $DIR/../docker_db.sh postgresql_14
elif [ "$RDBMS" == 'edb' ]; then
bash $DIR/../docker_db.sh edb
elif [ "$RDBMS" == 'db2' ]; then
bash $DIR/../docker_db.sh db2
elif [ "$RDBMS" == 'oracle' ]; then
bash $DIR/../docker_db.sh oracle_18
bash $DIR/../docker_db.sh oracle
elif [ "$RDBMS" == 'mssql' ]; then
bash $DIR/../docker_db.sh mssql
elif [ "$RDBMS" == 'hana' ]; then
bash $DIR/../docker_db.sh hana
elif [ "$RDBMS" == 'sybase' ]; then
bash $DIR/../docker_db.sh sybase
elif [ "$RDBMS" == 'cockroachdb' ]; then
bash $DIR/../docker_db.sh cockroachdb
elif [ "$RDBMS" == 'hana' ]; then
bash $DIR/../docker_db.sh hana
fi


@ -16,12 +16,12 @@ else
fi
mysql() {
mysql_5_7
mysql_8_0
}
mysql_5_7() {
$CONTAINER_CLI rm -f mysql || true
$CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mysql:5.7 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --log-bin-trust-function-creators=1
$CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mysql:5.7.40 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --log-bin-trust-function-creators=1
# Give the container some time to start
OUTPUT=
n=0
@ -45,7 +45,7 @@ mysql_5_7() {
mysql_8_0() {
$CONTAINER_CLI rm -f mysql || true
$CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mysql:8.0.21 --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1
$CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mysql:8.0.31 --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1
# Give the container some time to start
OUTPUT=
n=0
@ -68,8 +68,35 @@ mysql_8_0() {
}
mariadb() {
mariadb_10_9
}
mariadb_10_3() {
$CONTAINER_CLI rm -f mariadb || true
$CONTAINER_CLI run --name mariadb -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mariadb:10.7.5 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake
$CONTAINER_CLI run --name mariadb -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mariadb:10.3.36 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake
OUTPUT=
n=0
until [ "$n" -ge 5 ]
do
# Need to access STDERR. Thanks for the snippet https://stackoverflow.com/a/56577569/412446
{ OUTPUT="$( { $CONTAINER_CLI logs mariadb; } 2>&1 1>&3 3>&- )"; } 3>&1;
if [[ $OUTPUT == *"ready for connections"* ]]; then
break;
fi
n=$((n+1))
echo "Waiting for MariaDB to start..."
sleep 3
done
if [ "$n" -ge 5 ]; then
echo "MariaDB failed to start and configure after 15 seconds"
else
echo "MariaDB successfully started"
fi
}
mariadb_10_9() {
$CONTAINER_CLI rm -f mariadb || true
$CONTAINER_CLI run --name mariadb -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mariadb:10.9.3 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake
OUTPUT=
n=0
until [ "$n" -ge 5 ]
@ -91,7 +118,7 @@ mariadb() {
}
postgresql() {
postgresql_10
postgresql_14
}
postgresql_9_5() {
@ -115,19 +142,28 @@ postgresql_14() {
}
edb() {
edb_10
edb_14
}
edb_10() {
$CONTAINER_CLI rm -f edb || true
# The version of the base image can be seen and updated in ./edb/Dockerfile
# We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
(cd edb; $CONTAINER_CLI build -t edb-test:latest .)
$CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:latest
(cd edb; $CONTAINER_CLI build -t edb-test:10 -f edb10.Dockerfile .)
$CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:10
}
edb_14() {
$CONTAINER_CLI rm -f edb || true
# We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
(cd edb; $CONTAINER_CLI build -t edb-test:14 -f edb14.Dockerfile .)
$CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:14
}
db2() {
echo $CONTAINER_CLI
db2_11_5
}
db2_11_5() {
$PRIVILEGED_CLI $CONTAINER_CLI rm -f db2 || true
$PRIVILEGED_CLI $CONTAINER_CLI run --name db2 --privileged -e DB2INSTANCE=orm_test -e DB2INST1_PASSWORD=orm_test -e DBNAME=orm_test -e LICENSE=accept -e AUTOCONFIG=false -e ARCHIVE_LOGS=false -e TO_CREATE_SAMPLEDB=false -e REPODB=false -p 50000:50000 -d docker.io/ibmcom/db2:11.5.7.0
# Give the container some time to start
@ -140,6 +176,28 @@ db2() {
$PRIVILEGED_CLI $CONTAINER_CLI exec -t db2 su - orm_test bash -c ". /database/config/orm_test/sqllib/db2profile && /database/config/orm_test/sqllib/bin/db2 'connect to orm_test' && /database/config/orm_test/sqllib/bin/db2 'CREATE USER TEMPORARY TABLESPACE usr_tbsp MANAGED BY AUTOMATIC STORAGE'"
}
db2_10_5() {
$PRIVILEGED_CLI $CONTAINER_CLI rm -f db2 || true
# The sha represents the tag 10.5.0.5-3.10.0
$PRIVILEGED_CLI $CONTAINER_CLI run --name db2 --privileged -e DB2INST1_PASSWORD=db2inst1-pwd -e LICENSE=accept -p 50000:50000 -d docker.io/ibmoms/db2express-c@sha256:a499afd9709a1f69fb41703e88def9869955234c3525547e2efc3418d1f4ca2b db2start
# Give the container some time to start
OUTPUT=
while [[ $OUTPUT != *"DB2START"* ]]; do
echo "Waiting for DB2 to start..."
sleep 10
OUTPUT=$($PRIVILEGED_CLI $CONTAINER_CLI logs db2)
done
$PRIVILEGED_CLI $CONTAINER_CLI exec -t db2 su - db2inst1 bash -c "/home/db2inst1/sqllib/bin/db2 create database orm_test &&
/home/db2inst1/sqllib/bin/db2 'connect to orm_test' &&
/home/db2inst1/sqllib/bin/db2 'CREATE BUFFERPOOL BP8K pagesize 8K' &&
/home/db2inst1/sqllib/bin/db2 'CREATE SYSTEM TEMPORARY TABLESPACE STB_8 PAGESIZE 8K BUFFERPOOL BP8K' &&
/home/db2inst1/sqllib/bin/db2 'CREATE BUFFERPOOL BP16K pagesize 16K' &&
/home/db2inst1/sqllib/bin/db2 'CREATE SYSTEM TEMPORARY TABLESPACE STB_16 PAGESIZE 16K BUFFERPOOL BP16K' &&
/home/db2inst1/sqllib/bin/db2 'CREATE BUFFERPOOL BP32K pagesize 32K' &&
/home/db2inst1/sqllib/bin/db2 'CREATE SYSTEM TEMPORARY TABLESPACE STB_32 PAGESIZE 32K BUFFERPOOL BP32K' &&
/home/db2inst1/sqllib/bin/db2 'CREATE USER TEMPORARY TABLESPACE usr_tbsp MANAGED BY AUTOMATIC STORAGE'"
}
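
Once the container logs DB2START and the setup above has run, the orm_test database should accept the db2inst1 credentials that the db2 Gradle profile uses; a hedged smoke test from the host (assuming docker as the container CLI):

    docker exec -t db2 su - db2inst1 bash -c \
      "/home/db2inst1/sqllib/bin/db2 'connect to orm_test' && /home/db2inst1/sqllib/bin/db2 'list tablespaces'"
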
db2_spatial() {
$PRIVILEGED_CLI $CONTAINER_CLI rm -f db2spatial || true
temp_dir=$(mktemp -d)
@ -200,9 +258,35 @@ EOF
}
mssql() {
mssql_2022
}
mssql_2017() {
$CONTAINER_CLI rm -f mssql || true
#This sha256 matches a specific tag of mcr.microsoft.com/mssql/server:2019-latest :
$CONTAINER_CLI run --name mssql -d -p 1433:1433 -e "SA_PASSWORD=Hibernate_orm_test" -e ACCEPT_EULA=Y mcr.microsoft.com/mssql/server@sha256:f54a84b8a802afdfa91a954e8ddfcec9973447ce8efec519adf593b54d49bedf
#This sha256 matches a specific tag of mcr.microsoft.com/mssql/server:2017-latest :
$CONTAINER_CLI run --name mssql -d -p 1433:1433 -e "SA_PASSWORD=Hibernate_orm_test" -e ACCEPT_EULA=Y mcr.microsoft.com/mssql/server@sha256:7d194c54e34cb63bca083542369485c8f4141596805611e84d8c8bab2339eede
sleep 5
n=0
until [ "$n" -ge 5 ]
do
# We need a database that uses a non-lock based MVCC approach
# https://github.com/microsoft/homebrew-mssql-release/issues/2#issuecomment-682285561
$CONTAINER_CLI exec mssql bash -c 'echo "create database hibernate_orm_test collate SQL_Latin1_General_CP1_CS_AS; alter database hibernate_orm_test set READ_COMMITTED_SNAPSHOT ON" | /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Hibernate_orm_test -i /dev/stdin' && break
echo "Waiting for SQL Server to start..."
n=$((n+1))
sleep 5
done
if [ "$n" -ge 5 ]; then
echo "SQL Server failed to start and configure after 25 seconds"
else
echo "SQL Server successfully started"
fi
}
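
The collation and READ_COMMITTED_SNAPSHOT settings above are what give the test database its non-lock-based MVCC behaviour; a quick check once the container is up, as a sketch (assuming docker as the container CLI):

    docker exec mssql /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Hibernate_orm_test \
      -Q "select name, collation_name, is_read_committed_snapshot_on from sys.databases where name = 'hibernate_orm_test'"
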
mssql_2022() {
$CONTAINER_CLI rm -f mssql || true
#This sha256 matches a specific tag of mcr.microsoft.com/mssql/server:2022-latest :
$CONTAINER_CLI run --name mssql -d -p 1433:1433 -e "SA_PASSWORD=Hibernate_orm_test" -e ACCEPT_EULA=Y mcr.microsoft.com/mssql/server@sha256:5439be9edc3b514cf647bcd3651779fa13f487735a985f40cbdcfecc60fea273
sleep 5
n=0
until [ "$n" -ge 5 ]
@ -362,13 +446,13 @@ oracle_setup() {
# We increase file sizes to avoid online resizes as that requires lots of CPU which is restricted in XE
$CONTAINER_CLI exec oracle bash -c "source /home/oracle/.bashrc; bash -c \"
cat <<EOF | \$ORACLE_HOME/bin/sqlplus sys/Oracle18@localhost/XE as sysdba
alter database tempfile '/opt/oracle/oradata/XE/temp01.dbf' resize 400M;
alter database datafile '/opt/oracle/oradata/XE/system01.dbf' resize 1000M;
alter database datafile '/opt/oracle/oradata/XE/sysaux01.dbf' resize 600M;
alter database datafile '/opt/oracle/oradata/XE/undotbs01.dbf' resize 300M;
alter database add logfile group 4 '/opt/oracle/oradata/XE/redo04.log' size 500M reuse;
alter database add logfile group 5 '/opt/oracle/oradata/XE/redo05.log' size 500M reuse;
alter database add logfile group 6 '/opt/oracle/oradata/XE/redo06.log' size 500M reuse;
alter database tempfile '\$ORACLE_BASE/oradata/XE/temp01.dbf' resize 400M;
alter database datafile '\$ORACLE_BASE/oradata/XE/system01.dbf' resize 1000M;
alter database datafile '\$ORACLE_BASE/oradata/XE/sysaux01.dbf' resize 600M;
alter database datafile '\$ORACLE_BASE/oradata/XE/undotbs01.dbf' resize 300M;
alter database add logfile group 4 '\$ORACLE_BASE/oradata/XE/redo04.log' size 500M reuse;
alter database add logfile group 5 '\$ORACLE_BASE/oradata/XE/redo05.log' size 500M reuse;
alter database add logfile group 6 '\$ORACLE_BASE/oradata/XE/redo06.log' size 500M reuse;
alter system switch logfile;
alter system switch logfile;
@ -382,16 +466,65 @@ alter system set open_cursors=1000 sid='*' scope=both;
EOF\""
}
oracle_legacy() {
$CONTAINER_CLI rm -f oracle || true
# We need to use the defaults
# SYSTEM/Oracle18
$CONTAINER_CLI run --shm-size=1536m --name oracle -d -p 1521:1521 --ulimit nofile=1048576:1048576 docker.io/quillbuilduser/oracle-18-xe
oracle_setup
oracle_setup_old() {
HEALTHSTATUS=
until [ "$HEALTHSTATUS" == "healthy" ];
do
echo "Waiting for Oracle to start..."
sleep 5;
# On WSL, health-checks intervals don't work for Podman, so run them manually
if command -v podman > /dev/null; then
$CONTAINER_CLI healthcheck run oracle > /dev/null
fi
HEALTHSTATUS="`$CONTAINER_CLI inspect -f $HEALTCHECK_PATH oracle`"
HEALTHSTATUS=${HEALTHSTATUS##+( )} #Remove longest matching series of spaces from the front
HEALTHSTATUS=${HEALTHSTATUS%%+( )} #Remove longest matching series of spaces from the back
done
# We increase file sizes to avoid online resizes as that requires lots of CPU which is restricted in XE
$CONTAINER_CLI exec oracle bash -c "source /home/oracle/.bashrc; bash -c \"
cat <<EOF | \$ORACLE_HOME/bin/sqlplus sys/Oracle18@localhost/XE as sysdba
alter database tempfile '\$ORACLE_BASE/oradata/XE/temp.dbf' resize 400M;
alter database datafile '\$ORACLE_BASE/oradata/XE/system.dbf' resize 1000M;
alter database datafile '\$ORACLE_BASE/oradata/XE/sysaux.dbf' resize 700M;
alter database datafile '\$ORACLE_BASE/oradata/XE/undotbs1.dbf' resize 300M;
alter database add logfile group 4 '\$ORACLE_BASE/oradata/XE/redo04.log' size 500M reuse;
alter database add logfile group 5 '\$ORACLE_BASE/oradata/XE/redo05.log' size 500M reuse;
alter database add logfile group 6 '\$ORACLE_BASE/oradata/XE/redo06.log' size 500M reuse;
alter system switch logfile;
alter system switch logfile;
alter system switch logfile;
alter system checkpoint;
alter database drop logfile group 1;
alter database drop logfile group 2;
alter system set open_cursors=1000 sid='*' scope=both;
alter system set processes=150 scope=spfile;
alter system set filesystemio_options=asynch scope=spfile;
alter system set disk_asynch_io=true scope=spfile;
EOF\""
echo "Waiting for Oracle to restart after configuration..."
$CONTAINER_CLI stop oracle
$CONTAINER_CLI start oracle
HEALTHSTATUS=
until [ "$HEALTHSTATUS" == "healthy" ];
do
echo "Waiting for Oracle to start..."
sleep 5;
# On WSL, health-checks intervals don't work for Podman, so run them manually
if command -v podman > /dev/null; then
$CONTAINER_CLI healthcheck run oracle > /dev/null
fi
HEALTHSTATUS="`$CONTAINER_CLI inspect -f $HEALTCHECK_PATH oracle`"
HEALTHSTATUS=${HEALTHSTATUS##+( )} #Remove longest matching series of spaces from the front
HEALTHSTATUS=${HEALTHSTATUS%%+( )} #Remove longest matching series of spaces from the back
done
sleep 2;
echo "Oracle successfully started"
}
oracle() {
oracle_18
oracle_21
}
oracle_11() {
@ -404,7 +537,7 @@ oracle_11() {
--health-timeout 5s \
--health-retries 10 \
docker.io/gvenzl/oracle-xe:11.2.0.2-full
oracle_setup
oracle_setup_old
}
oracle_18() {
@ -433,46 +566,6 @@ oracle_21() {
oracle_setup
}
oracle_ee() {
#$CONTAINER_CLI login
$CONTAINER_CLI rm -f oracle || true
# We need to use the defaults
# sys as sysdba/Oradoc_db1
$CONTAINER_CLI run --name oracle -d -p 1521:1521 docker.io/store/oracle/database-enterprise:12.2.0.1-slim
# Give the container some time to start
OUTPUT=
while [[ $OUTPUT != *"NLS_CALENDAR"* ]]; do
echo "Waiting for Oracle to start..."
sleep 10
OUTPUT=$($CONTAINER_CLI logs oracle)
done
echo "Oracle successfully started"
# We increase file sizes to avoid online resizes as that requires lots of CPU which is restricted in XE
$CONTAINER_CLI exec oracle bash -c "source /home/oracle/.bashrc; \$ORACLE_HOME/bin/sqlplus sys/Oradoc_db1@ORCLCDB as sysdba <<EOF
create user c##hibernate_orm_test identified by hibernate_orm_test container=all;
grant connect, resource, dba to c##hibernate_orm_test container=all;
alter database tempfile '/u02/app/oracle/oradata/ORCL/temp01.dbf' resize 400M;
alter database datafile '/u02/app/oracle/oradata/ORCL/system01.dbf' resize 1000M;
alter database datafile '/u02/app/oracle/oradata/ORCL/sysaux01.dbf' resize 900M;
alter database datafile '/u02/app/oracle/oradata/ORCL/undotbs01.dbf' resize 300M;
alter database add logfile group 4 '/u02/app/oracle/oradata/ORCL/redo04.log' size 500M reuse;
alter database add logfile group 5 '/u02/app/oracle/oradata/ORCL/redo05.log' size 500M reuse;
alter database add logfile group 6 '/u02/app/oracle/oradata/ORCL/redo06.log' size 500M reuse;
alter system switch logfile;
alter system switch logfile;
alter system switch logfile;
alter system checkpoint;
alter database drop logfile group 1;
alter database drop logfile group 2;
alter database drop logfile group 3;
alter session set container=ORCLPDB1;
alter database datafile '/u02/app/oracle/oradata/ORCLCDB/orclpdb1/system01.dbf' resize 500M;
alter database datafile '/u02/app/oracle/oradata/ORCLCDB/orclpdb1/sysaux01.dbf' resize 500M;
EOF"
}
hana() {
temp_dir=$(mktemp -d)
echo '{"master_password" : "H1bernate_test"}' >$temp_dir/password.json
@ -500,6 +593,54 @@ hana() {
}
cockroachdb() {
cockroachdb_22_1
}
cockroachdb_22_1() {
$CONTAINER_CLI rm -f cockroach || true
LOG_CONFIG="
sinks:
stderr:
channels: all
filter: ERROR
redact: false
exit-on-error: true
"
$CONTAINER_CLI run -d --name=cockroach -m 3g -p 26257:26257 -p 8080:8080 docker.io/cockroachdb/cockroach:v22.1.10 start-single-node \
--insecure --store=type=mem,size=640MiB --advertise-addr=localhost --log="$LOG_CONFIG"
OUTPUT=
while [[ $OUTPUT != *"CockroachDB node starting"* ]]; do
echo "Waiting for CockroachDB to start..."
sleep 10
# Note we need to redirect stderr to stdout to capture the logs
OUTPUT=$($CONTAINER_CLI logs cockroach 2>&1)
done
echo "Enabling experimental box2d operators and some optimized settings for running the tests"
#settings documented in https://www.cockroachlabs.com/docs/v21.2/local-testing.html#use-a-local-single-node-cluster-with-in-memory-storage
$CONTAINER_CLI exec cockroach bash -c "cat <<EOF | ./cockroach sql --insecure
SET CLUSTER SETTING sql.spatial.experimental_box2d_comparison_operators.enabled = on;
SET CLUSTER SETTING kv.raft_log.disable_synchronization_unsafe = true;
SET CLUSTER SETTING kv.range_merge.queue_interval = '50ms';
SET CLUSTER SETTING jobs.registry.interval.gc = '30s';
SET CLUSTER SETTING jobs.registry.interval.cancel = '180s';
SET CLUSTER SETTING jobs.retention_time = '15s';
SET CLUSTER SETTING schemachanger.backfiller.buffer_increment = '128 KiB';
SET CLUSTER SETTING sql.stats.automatic_collection.enabled = false;
SET CLUSTER SETTING kv.range_split.by_load_merge_delay = '5s';
SET CLUSTER SETTING timeseries.storage.enabled = false;
SET CLUSTER SETTING timeseries.storage.resolution_10s.ttl = '0s';
SET CLUSTER SETTING timeseries.storage.resolution_30m.ttl = '0s';
ALTER RANGE default CONFIGURE ZONE USING \"gc.ttlseconds\" = 10;
ALTER DATABASE system CONFIGURE ZONE USING \"gc.ttlseconds\" = 10;
ALTER DATABASE defaultdb CONFIGURE ZONE USING \"gc.ttlseconds\" = 10;
quit
EOF
"
echo "Cockroachdb successfully started"
}
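
The cluster settings applied above can be read back through the same cockroach sql shell, which is a convenient way to confirm the node picked them up; a sketch (assuming docker as the container CLI):

    docker exec cockroach ./cockroach sql --insecure \
      -e "SHOW CLUSTER SETTING sql.stats.automatic_collection.enabled"
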
cockroachdb_21_2() {
$CONTAINER_CLI rm -f cockroach || true
LOG_CONFIG="
sinks:
@ -547,26 +688,34 @@ if [ -z ${1} ]; then
echo "No db name provided"
echo "Provide one of:"
echo -e "\tcockroachdb"
echo -e "\tcockroachdb_22_1"
echo -e "\tcockroachdb_21_1"
echo -e "\tdb2"
echo -e "\tdb2_11_5"
echo -e "\tdb2_10_5"
echo -e "\tdb2_spatial"
echo -e "\tedb"
echo -e "\tedb_14"
echo -e "\tedb_10"
echo -e "\thana"
echo -e "\tmariadb"
echo -e "\tmariadb_10_9"
echo -e "\tmariadb_10_3"
echo -e "\tmssql"
echo -e "\tmssql_2022"
echo -e "\tmssql_2017"
echo -e "\tmysql"
echo -e "\tmysql_5_7"
echo -e "\tmysql_8_0"
echo -e "\tmysql_5_7"
echo -e "\toracle"
echo -e "\toracle_11"
echo -e "\toracle_18"
echo -e "\toracle_21"
echo -e "\toracle_ee"
echo -e "\tpostgis"
echo -e "\toracle_18"
echo -e "\toracle_11"
echo -e "\tpostgresql"
echo -e "\tpostgresql_14"
echo -e "\tpostgresql_13"
echo -e "\tpostgresql_10"
echo -e "\tpostgresql_9_5"
echo -e "\tpostgresql"
echo -e "\tsybase"
else
${1}


@ -47,7 +47,7 @@ import org.hibernate.userguide.model.PhoneType;
import org.hibernate.userguide.model.WireTransferPayment;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.junit.Before;
@ -1534,8 +1534,8 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
}
@Test
@SkipForDialect(DerbyDialect.class)
@SkipForDialect(SybaseASEDialect.class)
@SkipForDialect(dialectClass = DerbyDialect.class)
@SkipForDialect(dialectClass = SybaseASEDialect.class)
public void test_hql_aggregate_functions_within_group_example() {
doInJPA(this::entityManagerFactory, entityManager -> {
//tag::hql-aggregate-functions-within-group-example[]
@ -1552,7 +1552,7 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
}
@Test
@SkipForDialect(value = DerbyDialect.class, comment = "See https://issues.apache.org/jira/browse/DERBY-2072")
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "See https://issues.apache.org/jira/browse/DERBY-2072")
public void test_hql_concat_function_example() {
doInJPA(this::entityManagerFactory, entityManager -> {
//tag::hql-concat-function-example[]
@ -1716,7 +1716,7 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
}
@Test
@SkipForDialect(value = CockroachDialect.class, comment = "https://github.com/cockroachdb/cockroach/issues/26710")
@SkipForDialect(dialectClass = CockroachDialect.class, reason = "https://github.com/cockroachdb/cockroach/issues/26710")
public void test_hql_sqrt_function_example() {
doInJPA(this::entityManagerFactory, entityManager -> {
//tag::hql-sqrt-function-example[]
@ -1731,8 +1731,8 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
}
@Test
@SkipForDialect(SQLServerDialect.class)
@SkipForDialect(value = DerbyDialect.class, comment = "Comparisons between 'DATE' and 'TIMESTAMP' are not supported")
@SkipForDialect(dialectClass = SQLServerDialect.class)
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Comparisons between 'DATE' and 'TIMESTAMP' are not supported")
public void test_hql_current_date_function_example() {
doInJPA(this::entityManagerFactory, entityManager -> {
//tag::hql-current-date-function-example[]
@ -1869,7 +1869,7 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
}
@Test
@SkipForDialect(SQLServerDialect.class)
@SkipForDialect(dialectClass = SQLServerDialect.class)
public void test_hql_str_function_example() {
doInJPA(this::entityManagerFactory, entityManager -> {
//tag::hql-str-function-example[]
@ -2006,7 +2006,7 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
}
@Test
@SkipForDialect(value = DerbyDialect.class, comment = "Comparisons between 'DATE' and 'TIMESTAMP' are not supported")
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Comparisons between 'DATE' and 'TIMESTAMP' are not supported")
public void test_hql_collection_expressions_example_8() {
doInJPA(this::entityManagerFactory, entityManager -> {
//tag::hql-collection-expressions-all-example[]
@ -3234,6 +3234,7 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
DialectChecks.SupportsSubqueryInOnClause.class,
DialectChecks.SupportsOrderByInCorrelatedSubquery.class
})
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "The lateral emulation for Oracle 11 would be very complex because nested correlation is unsupported")
public void test_hql_derived_join_example() {
doInJPA(this::entityManagerFactory, entityManager -> {


@ -51,7 +51,7 @@ public class BooleanMappingTests {
assertThat(
jdbcMapping.getJdbcType().getJdbcTypeCode(),
// the implicit mapping will depend on the Dialect
isOneOf(Types.BOOLEAN, Types.BIT, Types.TINYINT)
isOneOf( Types.BOOLEAN, Types.BIT, Types.TINYINT, Types.SMALLINT )
);
}


@ -11,6 +11,7 @@ import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase;
import org.junit.Test;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
@ -49,6 +50,7 @@ public class EnumerationCustomTypeTest extends BaseEntityManagerFunctionalTestCa
private String name;
@Type(org.hibernate.userguide.mapping.basic.GenderType.class)
@Column(length = 6)
public Gender gender;
//Getters and setters are omitted for brevity

edb/edb14.Dockerfile (new file)

@ -0,0 +1,48 @@
FROM quay.io/enterprisedb/edb-postgres-advanced:14.5-3.2-postgis
USER root
# this 777 will be replaced by 700 at runtime (allows semi-arbitrary "--user" values)
RUN chown -R postgres:postgres /var/lib/edb && chmod 777 /var/lib/edb && rm /docker-entrypoint-initdb.d/10_postgis.sh
USER postgres
ENV LANG en_US.utf8
ENV PG_MAJOR 14
ENV PG_VERSION 14
ENV PGPORT 5444
ENV PGDATA /var/lib/edb/as$PG_MAJOR/data/
VOLUME /var/lib/edb/as$PG_MAJOR/data/
COPY docker-entrypoint.sh /usr/local/bin/
ENTRYPOINT ["docker-entrypoint.sh"]
# We set the default STOPSIGNAL to SIGINT, which corresponds to what PostgreSQL
# calls "Fast Shutdown mode" wherein new connections are disallowed and any
# in-progress transactions are aborted, allowing PostgreSQL to stop cleanly and
# flush tables to disk, which is the best compromise available to avoid data
# corruption.
#
# Users who know their applications do not keep open long-lived idle connections
# may want to use a value of SIGTERM instead, which corresponds to "Smart
# Shutdown mode" in which any existing sessions are allowed to finish and the
# server stops when all sessions are terminated.
#
# See https://www.postgresql.org/docs/12/server-shutdown.html for more details
# about available PostgreSQL server shutdown signals.
#
# See also https://www.postgresql.org/docs/12/server-start.html for further
# justification of this as the default value, namely that the example (and
# shipped) systemd service files use the "Fast Shutdown mode" for service
# termination.
#
STOPSIGNAL SIGINT
#
# An additional setting that is recommended for all users regardless of this
# value is the runtime "--stop-timeout" (or your orchestrator/runtime's
# equivalent) for controlling how long to wait between sending the defined
# STOPSIGNAL and sending SIGKILL (which is likely to cause data corruption).
#
# The default in most runtimes (such as Docker) is 10 seconds, and the
# documentation at https://www.postgresql.org/docs/12/server-start.html notes
# that even 90 seconds may not be long enough in many instances.
EXPOSE 5444
CMD ["postgres"]
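
The edb_14() function above builds and starts this image; the equivalent manual steps, as a sketch (assuming docker as the container CLI and the repository root as the working directory):

    (cd edb; docker build -t edb-test:14 -f edb14.Dockerfile .)
    docker run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test \
      -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:14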


@ -29,6 +29,14 @@ ext {
'connection.init_sql' : ''
],
derby : [
'db.dialect' : 'org.hibernate.dialect.DerbyDialect',
'jdbc.driver': 'org.apache.derby.iapi.jdbc.AutoloadedDriver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
'jdbc.url' : 'jdbc:derby:target/tmp/derby/hibernate_orm_test;databaseName=hibernate_orm_test;create=true',
'connection.init_sql' : ''
],
derby_old : [
'db.dialect' : 'org.hibernate.dialect.DerbyDialect',
'jdbc.driver': 'org.apache.derby.jdbc.EmbeddedDriver',
'jdbc.user' : 'hibernate_orm_test',
@ -45,15 +53,6 @@ ext {
'jdbc.url' : 'jdbc:postgresql://' + dbHost + '/hibernate_orm_test?preparedStatementCacheQueries=0',
'connection.init_sql' : ''
],
pgsql_docker : [
'db.dialect' : 'org.hibernate.dialect.PostgreSQLDialect',
'jdbc.driver': 'org.postgresql.Driver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
// Disable prepared statement caching due to https://www.postgresql.org/message-id/CAEcMXhmmRd4-%2BNQbnjDT26XNdUoXdmntV9zdr8%3DTu8PL9aVCYg%40mail.gmail.com
'jdbc.url' : 'jdbc:postgresql://' + dbHost + '/hibernate_orm_test?preparedStatementCacheQueries=0',
'connection.init_sql' : ''
],
pgsql_ci : [
'db.dialect' : 'org.hibernate.dialect.PostgreSQLDialect',
'jdbc.driver': 'org.postgresql.Driver',
@ -81,15 +80,6 @@ ext {
'jdbc.url' : 'jdbc:jtds:sybase://' + dbHost + ':5000/hibernate_orm_test;maxStatements=0;cacheMetaData=false',
'connection.init_sql' : 'set ansinull on'
],
sybase_ci_legacynull : [
'db.dialect' : 'org.hibernate.dialect.SybaseASEDialect',
'jdbc.driver': 'net.sourceforge.jtds.jdbc.Driver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
// Disable prepared statement caching to avoid issues with changing schemas
'jdbc.url' : 'jdbc:jtds:sybase://' + dbHost + ':5000/hibernate_orm_test;maxStatements=0;cacheMetaData=false',
'connection.init_sql' : ''
],
mysql : [
'db.dialect' : 'org.hibernate.dialect.MySQLDialect',
'jdbc.driver': 'com.mysql.cj.jdbc.Driver',
@ -98,14 +88,6 @@ ext {
'jdbc.url' : 'jdbc:mysql://' + dbHost + '/hibernate_orm_test',
'connection.init_sql' : ''
],
mysql_docker : [
'db.dialect' : 'org.hibernate.dialect.MySQLDialect',
'jdbc.driver': 'com.mysql.cj.jdbc.Driver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
'jdbc.url' : 'jdbc:mysql://' + dbHost + '/hibernate_orm_test?allowPublicKeyRetrieval=true',
'connection.init_sql' : ''
],
mysql_ci : [
'db.dialect' : 'org.hibernate.dialect.MySQLDialect',
'jdbc.driver': 'com.mysql.cj.jdbc.Driver',
@ -114,15 +96,6 @@ ext {
'jdbc.url' : 'jdbc:mysql://' + dbHost + '/hibernate_orm_test?allowPublicKeyRetrieval=true',
'connection.init_sql' : ''
],
// uses docker mysql_8_0
mysql8_spatial_ci: [
'db.dialect' : 'org.hibernate.spatial.dialect.mysql.MySQL8SpatialDialect',
'jdbc.driver': 'com.mysql.cj.jdbc.Driver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
'jdbc.url' : 'jdbc:mysql://' + dbHost + '/hibernate_orm_test?allowPublicKeyRetrieval=true',
'connection.init_sql' : ''
],
mariadb : [
'db.dialect' : 'org.hibernate.dialect.MariaDBDialect',
'jdbc.driver': 'org.mariadb.jdbc.Driver',
@ -139,14 +112,6 @@ ext {
'jdbc.url' : 'jdbc:mariadb://' + dbHost + '/hibernate_orm_test',
'connection.init_sql' : ''
],
mariadb_spatial_ci : [
'db.dialect' : 'org.hibernate.spatial.dialect.mariadb.MariaDB103SpatialDialect',
'jdbc.driver': 'org.mariadb.jdbc.Driver',
'jdbc.user' : 'root',
'jdbc.pass' : 'hibernate_orm_test',
'jdbc.url' : 'jdbc:mariadb://' + dbHost + '/hibernate_orm_test',
'connection.init_sql' : ''
],
tidb : [
'db.dialect' : 'org.hibernate.dialect.TiDBDialect',
'jdbc.driver': 'com.mysql.jdbc.Driver',
@ -155,23 +120,6 @@ ext {
'jdbc.url' : 'jdbc:mysql://' + dbHost + '/hibernate_orm_test',
'connection.init_sql' : ''
],
tidb_ci5 : [
'db.dialect' : 'org.hibernate.dialect.TiDBDialect',
'jdbc.driver': 'com.mysql.jdbc.Driver',
'jdbc.user' : 'root',
'jdbc.pass' : '',
'jdbc.url' : 'jdbc:mysql://' + dbHost + ':4000/test',
'connection.init_sql' : ''
],
postgis : [
'db.dialect' : 'org.hibernate.spatial.dialect.postgis.PostgisPG95Dialect',
'jdbc.driver': 'org.postgresql.Driver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
// Disable prepared statement caching due to https://www.postgresql.org/message-id/CAEcMXhmmRd4-%2BNQbnjDT26XNdUoXdmntV9zdr8%3DTu8PL9aVCYg%40mail.gmail.com
'jdbc.url' : 'jdbc:postgresql://' + dbHost + '/hibernate_orm_test?preparedStatementCacheQueries=0',
'connection.init_sql' : ''
],
oracle : [
'db.dialect' : 'org.hibernate.dialect.OracleDialect',
'jdbc.driver': 'oracle.jdbc.OracleDriver',
@ -180,31 +128,6 @@ ext {
'jdbc.url' : 'jdbc:oracle:thin:@' + dbHost + ':1521/xe',
'connection.init_sql' : ''
],
oracle_jenkins : [
'db.dialect' : 'org.hibernate.dialect.OracleDialect',
'jdbc.driver': 'oracle.jdbc.OracleDriver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
'jdbc.url' : 'jdbc:oracle:thin:@hibernate-testing-oracle-se.ccuzkqo3zqzq.us-east-1.rds.amazonaws.com:1521:ORCL',
'connection.init_sql' : ''
],
oracle_rds : [
'db.dialect' : 'org.hibernate.dialect.OracleDialect',
'jdbc.driver': 'oracle.jdbc.OracleDriver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
'jdbc.url' : 'jdbc:oracle:thin:@localhost:1521:ORCL',
'connection.init_sql' : ''
],
// Use ./docker_db.sh oracle_ee to start the database
oracle_docker : [
'db.dialect' : 'org.hibernate.dialect.OracleDialect',
'jdbc.driver': 'oracle.jdbc.OracleDriver',
'jdbc.user' : 'c##hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
'jdbc.url' : 'jdbc:oracle:thin:@' + dbHost + ':1521/ORCLPDB1.localdomain',
'connection.init_sql' : ''
],
oracle_ci : [
'db.dialect' : 'org.hibernate.dialect.OracleDialect',
'jdbc.driver': 'oracle.jdbc.OracleDriver',
@ -213,14 +136,6 @@ ext {
'jdbc.url' : 'jdbc:oracle:thin:@' + dbHost + ':1521:XE',
'connection.init_sql' : ''
],
oracle_spatial_ci : [
'db.dialect' : 'org.hibernate.spatial.dialect.oracle.OracleSpatial10gDialect',
'jdbc.driver': 'oracle.jdbc.OracleDriver',
'jdbc.user' : 'SYSTEM',
'jdbc.pass' : 'Oracle18',
'jdbc.url' : 'jdbc:oracle:thin:@' + dbHost + ':1521:XE',
'connection.init_sql' : ''
],
mssql : [
'db.dialect' : 'org.hibernate.dialect.SQLServerDialect',
'jdbc.driver': 'com.microsoft.sqlserver.jdbc.SQLServerDriver',
@ -250,7 +165,7 @@ ext {
'jdbc.driver': 'com.ibm.db2.jcc.DB2Driver',
'jdbc.user' : 'db2inst1',
'jdbc.pass' : 'db2inst1-pwd',
'jdbc.url' : 'jdbc:db2://' + dbHost + ':50000/hibern8',
'jdbc.url' : 'jdbc:db2://' + dbHost + ':50000/orm_test',
'connection.init_sql' : ''
],
db2_ci : [
@ -261,23 +176,6 @@ ext {
'jdbc.url' : 'jdbc:db2://' + dbHost + ':50000/orm_test',
'connection.init_sql' : ''
],
db2_spatial_ci : [
'db.dialect' : 'org.hibernate.spatial.dialect.db2.DB2SpatialDialect',
'jdbc.driver': 'com.ibm.db2.jcc.DB2Driver',
'jdbc.user' : 'orm_test',
'jdbc.pass' : 'orm_test',
'jdbc.url' : 'jdbc:db2://' + dbHost + ':50000/orm_test',
'connection.init_sql' : ''
],
hana : [
'db.dialect' : 'org.hibernate.dialect.HANAColumnStoreDialect',
'jdbc.driver': 'com.sap.db.jdbc.Driver',
'jdbc.user' : 'HIBERNATE_TEST',
'jdbc.pass' : 'H1bernate_test',
// Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
'jdbc.url' : 'jdbc:sap://' + dbHost + ':30015/?statementCacheSize=0',
'connection.init_sql' : ''
],
hana_cloud : [
'db.dialect' : 'org.hibernate.dialect.HANAColumnStoreDialect',
'jdbc.driver': 'com.sap.db.jdbc.Driver',
@ -287,24 +185,6 @@ ext {
'jdbc.url' : 'jdbc:sap://' + dbHost + ':443/?encrypt=true&validateCertificate=false&statementCacheSize=0',
'connection.init_sql' : ''
],
hana_jenkins : [
'db.dialect' : 'org.hibernate.dialect.HANAColumnStoreDialect',
'jdbc.driver': 'com.sap.db.jdbc.Driver',
'jdbc.user' : 'HIBERNATE_TEST',
'jdbc.pass' : 'H1bernate_test',
// Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
'jdbc.url' : 'jdbc:sap://' + dbHost + ':39015/?statementCacheSize=0',
'connection.init_sql' : ''
],
hana_docker : [
'db.dialect' : 'org.hibernate.dialect.HANAColumnStoreDialect',
'jdbc.driver': 'com.sap.db.jdbc.Driver',
'jdbc.user' : 'SYSTEM',
'jdbc.pass' : 'H1bernate_test',
// Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
'jdbc.url' : 'jdbc:sap://' + dbHost + ':39017/?statementCacheSize=0',
'connection.init_sql' : ''
],
hana_ci : [
'db.dialect' : 'org.hibernate.dialect.HANAColumnStoreDialect',
'jdbc.driver': 'com.sap.db.jdbc.Driver',
@ -314,15 +194,6 @@ ext {
'jdbc.url' : 'jdbc:sap://' + dbHost + ':39017/?statementCacheSize=0',
'connection.init_sql' : ''
],
hana_spatial_ci : [
'db.dialect' : 'org.hibernate.spatial.dialect.hana.HANASpatialDialect',
'jdbc.driver': 'com.sap.db.jdbc.Driver',
'jdbc.user' : 'SYSTEM',
'jdbc.pass' : 'H1bernate_test',
// Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
'jdbc.url' : 'jdbc:sap://' + dbHost + ':39017/?statementCacheSize=0',
'connection.init_sql' : ''
],
cockroachdb : [
'db.dialect' : 'org.hibernate.dialect.CockroachDialect',
// CockroachDB uses the same pgwire protocol as PostgreSQL, so the driver is the same.
@ -333,16 +204,6 @@ ext {
'jdbc.url' : 'jdbc:postgresql://' + dbHost + ':26257/defaultdb?sslmode=disable&preparedStatementCacheQueries=0',
'connection.init_sql' : ''
],
cockroachdb_spatial : [
'db.dialect' : 'org.hibernate.spatial.dialect.cockroachdb.CockroachDB202SpatialDialect',
// CockroachDB uses the same pgwire protocol as PostgreSQL, so the driver is the same.
'jdbc.driver': 'org.postgresql.Driver',
'jdbc.user' : 'root',
'jdbc.pass' : '',
// Disable prepared statement caching due to https://www.postgresql.org/message-id/CAEcMXhmmRd4-%2BNQbnjDT26XNdUoXdmntV9zdr8%3DTu8PL9aVCYg%40mail.gmail.com
'jdbc.url' : 'jdbc:postgresql://'+ dbHost +'localhost:26257/defaultdb?sslmode=disable&preparedStatementCacheQueries=0',
'connection.init_sql' : ''
],
firebird : [
'db.dialect' : 'org.hibernate.dialect.FirebirdDialect',
'jdbc.driver': 'org.firebirdsql.jdbc.FBDriver',
@ -362,5 +223,9 @@ def processTestResourcesTask = project.tasks.findByName( 'processTestResources'
if ( processTestResourcesTask != null ) {
processTestResourcesTask.inputs.property( 'db', db )
processTestResourcesTask.inputs.property( 'dbHost', dbHost )
// processTestResourcesTask.inputs.property( "gradle.libs.versions.h2", project.getProperty( "gradle.libs.versions.h2", "2.1.214" ) )
// processTestResourcesTask.inputs.property( "gradle.libs.versions.h2gis", project.getProperty( "gradle.libs.versions.h2gis", "2.1.0" ) )
// processTestResourcesTask.inputs.property( "gradle.libs.versions.hsqldb", project.getProperty( "gradle.libs.versions.hsqldb", "2.7.1" ) )
// processTestResourcesTask.inputs.property( "gradle.libs.versions.derby", project.getProperty( "gradle.libs.versions.derby", "10.15.2.0" ) )
processTestResourcesTask.filter( ReplaceTokens, tokens: dbBundle[db] )
}
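
Each dbBundle entry is a token map that processTestResources filters into the test configuration, keyed by the db property; a sketch of the corresponding command line (dbHost override shape as used in the Jenkinsfile and CI script above):

    ./gradlew test -Pdb=db2_ci -DdbHost=localhost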


@ -14,6 +14,7 @@ import java.sql.Types;
import org.hibernate.LockOptions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.OracleDialect;
@ -21,6 +22,8 @@ import org.hibernate.dialect.function.CastingConcatFunction;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.CountFunction;
import org.hibernate.dialect.function.DB2FormatEmulation;
import org.hibernate.dialect.function.DB2PositionFunction;
import org.hibernate.dialect.function.DB2SubstringFunction;
import org.hibernate.dialect.identity.DB2IdentityColumnSupport;
import org.hibernate.dialect.identity.IdentityColumnSupport;
import org.hibernate.dialect.pagination.DB2LimitHandler;
@ -151,6 +154,11 @@ public class DB2LegacyDialect extends Dialect {
return 0;
}
@Override
public int getPreferredSqlTypeCodeForBoolean() {
return getDB2Version().isBefore( 11 ) ? Types.SMALLINT : Types.BOOLEAN;
}
@Override
protected String columnType(int sqlTypeCode) {
switch ( sqlTypeCode ) {
@ -213,6 +221,11 @@ public class DB2LegacyDialect extends Dialect {
return 31;
}
@Override
protected boolean supportsPredicateAsExpression() {
return getDB2Version().isSameOrAfter( 11 );
}
@Override
public boolean supportsDistinctFromPredicate() {
return getDB2Version().isSameOrAfter( 11, 1 );
@ -240,18 +253,14 @@ public class DB2LegacyDialect extends Dialect {
.setInvariantType(
queryEngine.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.STRING )
)
.setArgumentCountBetween( 2, 4 )
.setParameterTypes(FunctionParameterType.STRING, FunctionParameterType.INTEGER, FunctionParameterType.INTEGER, FunctionParameterType.ANY)
.setArgumentListSignature( "(STRING string, INTEGER start[, INTEGER length[, units]])" )
.register();
queryEngine.getSqmFunctionRegistry().namedDescriptorBuilder( "substring" )
.setInvariantType(
queryEngine.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.STRING )
)
.setArgumentCountBetween( 2, 4 )
.setParameterTypes(FunctionParameterType.STRING, FunctionParameterType.INTEGER, FunctionParameterType.INTEGER, FunctionParameterType.ANY)
.setArgumentListSignature( "(STRING string{ INTEGER from|,} start[{ INTEGER for|,} length[, units]])" )
.setArgumentCountBetween( 2, 3 )
.setParameterTypes(FunctionParameterType.STRING, FunctionParameterType.INTEGER, FunctionParameterType.INTEGER)
.setArgumentListSignature( "(STRING string, INTEGER start[, INTEGER length])" )
.register();
queryEngine.getSqmFunctionRegistry().register(
"substring",
new DB2SubstringFunction( queryEngine.getTypeConfiguration() )
);
functionFactory.translate();
functionFactory.bitand();
functionFactory.bitor();
@ -269,18 +278,38 @@ public class DB2LegacyDialect extends Dialect {
functionFactory.octetLength();
functionFactory.ascii();
functionFactory.char_chr();
functionFactory.position();
functionFactory.trunc();
functionFactory.truncate();
functionFactory.insert();
functionFactory.overlayCharacterLength_overlay();
functionFactory.median();
functionFactory.stddev();
functionFactory.stddevPopSamp();
functionFactory.varPopSamp();
functionFactory.regrLinearRegressionAggregates();
functionFactory.variance();
functionFactory.varianceSamp();
functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
if ( getDB2Version().isSameOrAfter( 11 ) ) {
functionFactory.position();
functionFactory.overlayLength_overlay( false );
functionFactory.median();
functionFactory.inverseDistributionOrderedSetAggregates();
functionFactory.stddevPopSamp();
functionFactory.varPopSamp();
functionFactory.varianceSamp();
}
else {
// Before version 11, the position function required the use of the code units
queryEngine.getSqmFunctionRegistry().register(
"position",
new DB2PositionFunction( queryEngine.getTypeConfiguration() )
);
// Before version 11, the overlay function required the use of the code units
functionFactory.overlayLength_overlay( true );
// ordered set aggregate functions are only available as of version 11, and we can't reasonably emulate them
// so no percent_rank, cume_dist, median, mode, percentile_cont or percentile_disc
queryEngine.getSqmFunctionRegistry().registerAlternateKey( "stddev_pop", "stddev" );
functionFactory.stddevSamp_sumCount();
queryEngine.getSqmFunctionRegistry().registerAlternateKey( "var_pop", "variance" );
functionFactory.varSamp_sumCount();
}
functionFactory.addYearsMonthsDaysHoursMinutesSeconds();
functionFactory.yearsMonthsDaysHoursMinutesSecondsBetween();
functionFactory.dateTrunc();
@ -335,10 +364,6 @@ public class DB2LegacyDialect extends Dialect {
functionFactory.windowFunctions();
if ( getDB2Version().isSameOrAfter( 9, 5 ) ) {
functionFactory.listagg( null );
if ( getDB2Version().isSameOrAfter( 11, 1 ) ) {
functionFactory.inverseDistributionOrderedSetAggregates();
functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
}
}
}
@ -362,50 +387,53 @@ public class DB2LegacyDialect extends Dialect {
@Override
public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
if ( getDB2Version().isBefore( 11 ) ) {
return DB2Dialect.timestampdiffPatternV10( unit, fromTemporalType, toTemporalType );
}
StringBuilder pattern = new StringBuilder();
boolean castFrom = fromTemporalType != TemporalType.TIMESTAMP && !unit.isDateUnit();
boolean castTo = toTemporalType != TemporalType.TIMESTAMP && !unit.isDateUnit();
switch (unit) {
switch ( unit ) {
case NATIVE:
case NANOSECOND:
pattern.append("(seconds_between(");
pattern.append( "(seconds_between(" );
break;
//note: DB2 does have weeks_between()
case MONTH:
case QUARTER:
// the months_between() function results
// in a non-integral value, so trunc() it
pattern.append("trunc(months_between(");
pattern.append( "trunc(months_between(" );
break;
default:
pattern.append("?1s_between(");
pattern.append( "?1s_between(" );
}
if (castTo) {
pattern.append("cast(?3 as timestamp)");
if ( castTo ) {
pattern.append( "cast(?3 as timestamp)" );
}
else {
pattern.append("?3");
pattern.append( "?3" );
}
pattern.append(",");
if (castFrom) {
pattern.append("cast(?2 as timestamp)");
pattern.append( "," );
if ( castFrom ) {
pattern.append( "cast(?2 as timestamp)" );
}
else {
pattern.append("?2");
pattern.append( "?2" );
}
pattern.append(")");
switch (unit) {
pattern.append( ")" );
switch ( unit ) {
case NATIVE:
pattern.append("+(microsecond(?3)-microsecond(?2))/1e6)");
pattern.append( "+(microsecond(?3)-microsecond(?2))/1e6)" );
break;
case NANOSECOND:
pattern.append("*1e9+(microsecond(?3)-microsecond(?2))*1e3)");
pattern.append( "*1e9+(microsecond(?3)-microsecond(?2))*1e3)" );
break;
case MONTH:
pattern.append(")");
pattern.append( ")" );
break;
case QUARTER:
pattern.append("/3)");
pattern.append( "/3)" );
break;
}
return pattern.toString();
@ -692,7 +720,13 @@ public class DB2LegacyDialect extends Dialect {
@Override
public void appendBinaryLiteral(SqlAppender appender, byte[] bytes) {
appender.appendSql( "BX'" );
if ( getDB2Version().isSameOrAfter( 11 ) ) {
appender.appendSql( "BX'" );
}
else {
// This should be fine on DB2 prior to 10
appender.appendSql( "X'" );
}
PrimitiveByteArrayJavaType.INSTANCE.appendString( appender, bytes );
appender.appendSql( '\'' );
}
@ -811,13 +845,18 @@ public class DB2LegacyDialect extends Dialect {
@Override
public String extractPattern(TemporalUnit unit) {
if ( unit == TemporalUnit.WEEK ) {
// Not sure why, but `extract(week from '2019-05-27')` wrongly returns 21 and week_iso behaves correct
return "week_iso(?2)";
}
else {
return super.extractPattern( unit );
switch ( unit ) {
case WEEK:
// Not sure why, but `extract(week from '2019-05-27')` wrongly returns 21 and week_iso behaves correct
return "week_iso(?2)";
case DAY_OF_YEAR:
return "dayofyear(?2)";
case DAY_OF_WEEK:
return "dayofweek(?2)";
case QUARTER:
return "quarter(?2)";
}
return super.extractPattern( unit );
}
@Override


@ -30,11 +30,13 @@ import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableReferenceJoin;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.insert.InsertStatement;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.ast.tree.update.UpdateStatement;
import org.hibernate.sql.exec.spi.JdbcOperation;
@ -45,6 +47,9 @@ import org.hibernate.sql.exec.spi.JdbcOperation;
*/
public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAstTranslator<T> {
// We have to track whether we are in a lateral query for applying lateral during window emulation
private boolean inLateral;
public DB2LegacySqlAstTranslator(SessionFactoryImplementor sessionFactory, Statement statement) {
super( sessionFactory, statement );
}
@ -203,24 +208,60 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
}
protected boolean shouldEmulateFetchClause(QueryPart queryPart) {
// Percent fetches or ties fetches aren't supported in DB2
// According to LegacyDB2LimitHandler, variable limit also isn't supported before 11.1
// Check if current query part is already row numbering to avoid infinite recursion
return getQueryPartForRowNumbering() != queryPart && (
useOffsetFetchClause( queryPart ) && !isRowsOnlyFetchClauseType( queryPart )
|| getDB2Version().isBefore( 11, 1 ) && ( queryPart.isRoot() && hasLimit() || !( queryPart.getFetchClauseExpression() instanceof Literal ) )
);
if ( getQueryPartForRowNumbering() == queryPart ) {
return false;
}
// Percent fetches or ties fetches aren't supported in DB2
if ( useOffsetFetchClause( queryPart ) && !isRowsOnlyFetchClauseType( queryPart ) ) {
return true;
}
// According to LegacyDB2LimitHandler, variable limit also isn't supported before 11.1
return getDB2Version().isBefore( 11, 1 )
&& queryPart.getFetchClauseExpression() != null
&& !( queryPart.getFetchClauseExpression() instanceof Literal );
}
protected boolean supportsOffsetClause() {
return getDB2Version().isSameOrAfter( 11, 1 );
}
@Override
public void visitQueryPartTableReference(QueryPartTableReference tableReference) {
final boolean oldLateral = inLateral;
inLateral = tableReference.isLateral();
super.visitQueryPartTableReference( tableReference );
inLateral = oldLateral;
}
@Override
public void visitSelectStatement(SelectStatement statement) {
if ( getQueryPartForRowNumbering() == statement.getQueryPart() && inLateral ) {
appendSql( "lateral " );
}
super.visitSelectStatement( statement );
}
@Override
protected void emulateFetchOffsetWithWindowFunctionsVisitQueryPart(QueryPart queryPart) {
if ( inLateral ) {
appendSql( "lateral " );
final boolean oldLateral = inLateral;
inLateral = false;
super.emulateFetchOffsetWithWindowFunctionsVisitQueryPart( queryPart );
inLateral = oldLateral;
}
else {
super.emulateFetchOffsetWithWindowFunctionsVisitQueryPart( queryPart );
}
}
@Override
public void visitQueryGroup(QueryGroup queryGroup) {
final boolean emulateFetchClause = shouldEmulateFetchClause( queryGroup );
if ( emulateFetchClause || !supportsOffsetClause() && hasOffset( queryGroup ) ) {
emulateFetchOffsetWithWindowFunctions( queryGroup, emulateFetchClause );
if ( emulateFetchClause ||
getQueryPartForRowNumbering() != queryGroup && !supportsOffsetClause() && hasOffset( queryGroup ) ) {
emulateFetchOffsetWithWindowFunctions( queryGroup, true );
}
else {
super.visitQueryGroup( queryGroup );
@ -230,8 +271,9 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
@Override
public void visitQuerySpec(QuerySpec querySpec) {
final boolean emulateFetchClause = shouldEmulateFetchClause( querySpec );
if ( emulateFetchClause || !supportsOffsetClause() && hasOffset( querySpec ) ) {
emulateFetchOffsetWithWindowFunctions( querySpec, emulateFetchClause );
if ( emulateFetchClause ||
getQueryPartForRowNumbering() != querySpec && !supportsOffsetClause() && hasOffset( querySpec ) ) {
emulateFetchOffsetWithWindowFunctions( querySpec, true );
}
else {
super.visitQuerySpec( querySpec );
@ -253,6 +295,26 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
}
}
@Override
protected void renderOffsetExpression(Expression offsetExpression) {
if ( supportsParameterOffsetFetchExpression() ) {
super.renderOffsetExpression( offsetExpression );
}
else {
renderExpressionAsLiteral( offsetExpression, getJdbcParameterBindings() );
}
}
@Override
protected void renderFetchExpression(Expression fetchExpression) {
if ( supportsParameterOffsetFetchExpression() ) {
super.renderFetchExpression( fetchExpression );
}
else {
renderExpressionAsLiteral( fetchExpression, getJdbcParameterBindings() );
}
}
@Override
protected void visitDeleteStatementOnly(DeleteStatement statement) {
final boolean closeWrapper = renderReturningClause( statement );
@ -376,4 +438,8 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
return this.getDialect().getVersion();
}
protected boolean supportsParameterOffsetFetchExpression() {
return getDB2Version().isSameOrAfter( 11 );
}
}


@ -287,6 +287,7 @@ public class H2LegacyDialect extends Dialect {
functionFactory.radians();
functionFactory.degrees();
functionFactory.log10();
functionFactory.mod_operator();
functionFactory.rand();
functionFactory.truncate();
functionFactory.soundex();


@ -230,6 +230,12 @@ public class H2LegacySqlAstTranslator<T extends JdbcOperation> extends AbstractS
return getDialect().getVersion().isSameOrAfter( 1, 4, 197 );
}
@Override
protected boolean supportsRowValueConstructorDistinctFromSyntax() {
// Row value constructor 'distinct from' syntax seems to have been buggy before 1.4.200
return getDialect().getVersion().isSameOrAfter( 1, 4, 200 );
}
@Override
protected boolean supportsNullPrecedence() {
// Support for nulls clause in listagg was added in 2.0


@ -85,6 +85,25 @@ public class MariaDBLegacySqlAstTranslator<T extends JdbcOperation> extends Abst
return useOffsetFetchClause( queryPart ) && getQueryPartForRowNumbering() != queryPart && supportsWindowFunctions() && !isRowsOnlyFetchClauseType( queryPart );
}
@Override
protected boolean supportsSimpleQueryGrouping() {
return getDialect().getVersion().isSameOrAfter( 10, 4 );
}
@Override
protected boolean shouldEmulateLateralWithIntersect(QueryPart queryPart) {
// Intersect emulation requires nested correlation when no simple query grouping is possible
// and the query has an offset/fetch clause, so we have to disable the emulation in this case,
// because nested correlation is not supported
return supportsSimpleQueryGrouping() || !queryPart.hasOffsetOrFetchClause();
}
@Override
protected boolean supportsNestedSubqueryCorrelation() {
// Nested subquery correlation doesn't seem to be supported
return false;
}
@Override
public void visitQueryGroup(QueryGroup queryGroup) {
if ( shouldEmulateFetchClause( queryGroup ) ) {


@ -170,6 +170,16 @@ public class MySQLLegacySqlAstTranslator<T extends JdbcOperation> extends Abstra
return getDialect().getVersion().isSameOrAfter( 8 );
}
@Override
protected boolean supportsSimpleQueryGrouping() {
return getDialect().getVersion().isSameOrAfter( 8 );
}
@Override
protected boolean supportsNestedSubqueryCorrelation() {
return false;
}
@Override
protected String getFromDual() {
return " from dual";


@ -200,22 +200,8 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
querySpec,
true, // we need select aliases to avoid ORA-00918: column ambiguously defined
() -> {
final QueryPart currentQueryPart = getQueryPartStack().getCurrent();
final boolean needsWrapper;
if ( currentQueryPart instanceof QueryGroup ) {
// visitQuerySpec will add the select wrapper
needsWrapper = !currentQueryPart.hasOffsetOrFetchClause();
}
else {
needsWrapper = true;
}
if ( needsWrapper ) {
appendSql( "select * from (" );
}
super.visitQuerySpec( querySpec );
if ( needsWrapper ) {
appendSql( ')' );
}
appendSql( "select * from " );
emulateFetchOffsetWithWindowFunctionsVisitQueryPart( querySpec );
appendSql( " where rownum<=" );
final Stack<Clause> clauseStack = getClauseStack();
clauseStack.push( Clause.WHERE );
@ -499,21 +485,4 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
return getDialect().supportsFetchClause( FetchClauseType.ROWS_ONLY );
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
final BinaryArithmeticOperator operator = arithmeticExpression.getOperator();
if ( operator == BinaryArithmeticOperator.MODULO ) {
append( "mod" );
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( ',' );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
return;
}
else {
super.visitBinaryArithmeticExpression( arithmeticExpression );
}
}
}


@ -524,7 +524,8 @@ public class PostgreSQLLegacyDialect extends Dialect {
functionFactory.degrees();
functionFactory.trunc();
functionFactory.log();
if ( getVersion().isSameOrAfter(12) ) {
functionFactory.mod_operator();
if ( getVersion().isSameOrAfter( 12 ) ) {
functionFactory.log10();
functionFactory.tanh();
functionFactory.sinh();


@ -12,6 +12,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteMaterialization;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
@ -212,4 +213,13 @@ public class PostgreSQLLegacySqlAstTranslator<T extends JdbcOperation> extends A
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
}


@ -44,6 +44,7 @@ import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.TrimSpec;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
@ -309,6 +310,31 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
if ( getVersion().isSameOrAfter( 14 ) ) {
functionFactory.listagg_stringAggWithinGroup( "varchar(max)" );
}
if ( getVersion().isSameOrAfter( 16 ) ) {
functionFactory.leastGreatest();
}
}
@Override
public String trimPattern(TrimSpec specification, char character) {
if ( getVersion().isSameOrAfter( 16 ) ) {
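// SQL Server 2022 accepts a characters argument for trim/ltrim/rtrim,
// e.g. TRAILING '-' renders as rtrim(?1,'-') and BOTH '-' as trim('-' from ?1)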
switch ( specification ) {
case BOTH:
return character == ' '
? "trim(?1)"
: "trim('" + character + "' from ?1)";
case LEADING:
return character == ' '
? "ltrim(?1)"
: "ltrim(?1,'" + character + "')";
case TRAILING:
return character == ' '
? "rtrim(?1)"
: "rtrim(?1,'" + character + "')";
}
throw new UnsupportedOperationException( "Unsupported specification: " + specification );
}
return super.trimPattern( specification, character );
}
@Override
@ -397,6 +423,11 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
return getVersion().isSameOrAfter( 10 );
}
@Override
public boolean supportsDistinctFromPredicate() {
return getVersion().isSameOrAfter( 16 );
}
@Override
public char closeQuote() {
return ']';
@ -902,7 +933,7 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
@Override
public String[] getDropSchemaCommand(String schemaName) {
if ( getVersion().isSameOrAfter( 16 ) ) {
if ( getVersion().isSameOrAfter( 13 ) ) {
return new String[] { "drop schema if exists " + schemaName };
}
return super.getDropSchemaCommand( schemaName );


@ -20,6 +20,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
@ -420,6 +421,15 @@ public class SQLServerLegacySqlAstTranslator<T extends JdbcOperation> extends Ab
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
@Override
protected boolean supportsRowValueConstructorSyntax() {
return false;


@ -18,6 +18,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
@ -325,6 +326,15 @@ public class SybaseASELegacySqlAstTranslator<T extends JdbcOperation> extends Ab
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
@Override
public void visitColumnReference(ColumnReference columnReference) {
final String dmlTargetTableAlias = getDmlTargetTableAlias();


@ -17,6 +17,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
@ -186,6 +187,15 @@ public class SybaseAnywhereSqlAstTranslator<T extends JdbcOperation> extends Abs
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
@Override
protected boolean supportsRowValueConstructorSyntax() {
return false;


@ -17,6 +17,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
@ -148,6 +149,15 @@ public class SybaseLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
@Override
protected boolean supportsRowValueConstructorSyntax() {
return false;


@ -379,11 +379,15 @@ public class MetadataBuildingProcess {
final ClassLoaderService classLoaderService = options.getServiceRegistry().getService( ClassLoaderService.class );
final TypeConfiguration typeConfiguration = bootstrapContext.getTypeConfiguration();
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
final TypeContributions typeContributions = () -> typeConfiguration;
// add Dialect contributed types
final Dialect dialect = options.getServiceRegistry().getService( JdbcServices.class ).getDialect();
dialect.contributeTypes( typeContributions, options.getServiceRegistry() );
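// Remember the descriptors contributed by the Dialect, so the preferred-type fallbacks below
// only replace them when no TypeContributor has registered its own descriptor in the meantime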
final JdbcType dialectUuidDescriptor = jdbcTypeRegistry.findDescriptor( SqlTypes.UUID );
final JdbcType dialectArrayDescriptor = jdbcTypeRegistry.findDescriptor( SqlTypes.ARRAY );
final JdbcType dialectIntervalDescriptor = jdbcTypeRegistry.findDescriptor( SqlTypes.INTERVAL_SECOND );
// add TypeContributor contributed types.
for ( TypeContributor contributor : classLoaderService.loadJavaServices( TypeContributor.class ) ) {
@ -391,10 +395,14 @@ public class MetadataBuildingProcess {
}
// add fallback type descriptors
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
final int preferredSqlTypeCodeForUuid = ConfigurationHelper.getPreferredSqlTypeCodeForUuid( bootstrapContext.getServiceRegistry() );
if ( preferredSqlTypeCodeForUuid != SqlTypes.UUID ) {
jdbcTypeRegistry.addDescriptor( SqlTypes.UUID, jdbcTypeRegistry.getDescriptor( preferredSqlTypeCodeForUuid ) );
if ( jdbcTypeRegistry.findDescriptor( SqlTypes.UUID ) == dialectUuidDescriptor ) {
jdbcTypeRegistry.addDescriptor(
SqlTypes.UUID,
jdbcTypeRegistry.getDescriptor( preferredSqlTypeCodeForUuid )
);
}
}
else {
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.UUID, SqlTypes.BINARY );
@ -404,7 +412,12 @@ public class MetadataBuildingProcess {
final int preferredSqlTypeCodeForArray = ConfigurationHelper.getPreferredSqlTypeCodeForArray( bootstrapContext.getServiceRegistry() );
if ( preferredSqlTypeCodeForArray != SqlTypes.ARRAY ) {
jdbcTypeRegistry.addDescriptor( SqlTypes.ARRAY, jdbcTypeRegistry.getDescriptor( preferredSqlTypeCodeForArray ) );
if ( jdbcTypeRegistry.findDescriptor( SqlTypes.ARRAY ) == dialectArrayDescriptor ) {
jdbcTypeRegistry.addDescriptor(
SqlTypes.ARRAY,
jdbcTypeRegistry.getDescriptor( preferredSqlTypeCodeForArray )
);
}
}
else if ( jdbcTypeRegistry.findDescriptor( SqlTypes.ARRAY ) == null ) {
// Fallback to VARBINARY
@ -413,7 +426,12 @@ public class MetadataBuildingProcess {
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.INET, SqlTypes.VARBINARY );
final int preferredSqlTypeCodeForDuration = ConfigurationHelper.getPreferredSqlTypeCodeForDuration( bootstrapContext.getServiceRegistry() );
if ( preferredSqlTypeCodeForDuration != SqlTypes.INTERVAL_SECOND ) {
jdbcTypeRegistry.addDescriptor( SqlTypes.INTERVAL_SECOND, jdbcTypeRegistry.getDescriptor( preferredSqlTypeCodeForDuration ) );
if ( jdbcTypeRegistry.findDescriptor( SqlTypes.INTERVAL_SECOND ) == dialectIntervalDescriptor ) {
jdbcTypeRegistry.addDescriptor(
SqlTypes.INTERVAL_SECOND,
jdbcTypeRegistry.getDescriptor( preferredSqlTypeCodeForDuration )
);
}
}
else {
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.INTERVAL_SECOND, SqlTypes.NUMERIC );


@ -12,6 +12,8 @@ import org.hibernate.dialect.function.CastingConcatFunction;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.CountFunction;
import org.hibernate.dialect.function.DB2FormatEmulation;
import org.hibernate.dialect.function.DB2PositionFunction;
import org.hibernate.dialect.function.DB2SubstringFunction;
import org.hibernate.dialect.identity.DB2IdentityColumnSupport;
import org.hibernate.dialect.identity.IdentityColumnSupport;
import org.hibernate.dialect.pagination.DB2LimitHandler;
@ -193,6 +195,11 @@ public class DB2Dialect extends Dialect {
return 31;
}
@Override
protected boolean supportsPredicateAsExpression() {
return getDB2Version().isSameOrAfter( 11 );
}
@Override
public boolean supportsDistinctFromPredicate() {
return getDB2Version().isSameOrAfter( 11, 1 );
@ -222,18 +229,14 @@ public class DB2Dialect extends Dialect {
.setInvariantType(
queryEngine.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.STRING )
)
.setArgumentCountBetween( 2, 4 )
.setParameterTypes(FunctionParameterType.STRING, FunctionParameterType.INTEGER, FunctionParameterType.INTEGER, FunctionParameterType.ANY)
.setArgumentListSignature( "(STRING string, INTEGER start[, INTEGER length[, units]])" )
.register();
queryEngine.getSqmFunctionRegistry().namedDescriptorBuilder( "substring" )
.setInvariantType(
queryEngine.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.STRING )
)
.setArgumentCountBetween( 2, 4 )
.setParameterTypes(FunctionParameterType.STRING, FunctionParameterType.INTEGER, FunctionParameterType.INTEGER, FunctionParameterType.ANY)
.setArgumentListSignature( "(STRING string{ INTEGER from|,} start[{ INTEGER for|,} length[, units]])" )
.setArgumentCountBetween( 2, 3 )
.setParameterTypes(FunctionParameterType.STRING, FunctionParameterType.INTEGER, FunctionParameterType.INTEGER)
.setArgumentListSignature( "(STRING string, INTEGER start[, INTEGER length])" )
.register();
queryEngine.getSqmFunctionRegistry().register(
"substring",
new DB2SubstringFunction( queryEngine.getTypeConfiguration() )
);
functionFactory.translate();
functionFactory.bitand();
functionFactory.bitor();
@ -251,18 +254,39 @@ public class DB2Dialect extends Dialect {
functionFactory.octetLength();
functionFactory.ascii();
functionFactory.char_chr();
functionFactory.position();
functionFactory.trunc();
functionFactory.truncate();
functionFactory.insert();
functionFactory.overlayCharacterLength_overlay();
functionFactory.median();
functionFactory.characterLength_length( SqlAstNodeRenderingMode.DEFAULT );
functionFactory.stddev();
functionFactory.stddevPopSamp();
functionFactory.varPopSamp();
functionFactory.regrLinearRegressionAggregates();
functionFactory.variance();
functionFactory.varianceSamp();
functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
if ( getDB2Version().isSameOrAfter( 11 ) ) {
functionFactory.position();
functionFactory.overlayLength_overlay( false );
functionFactory.median();
functionFactory.inverseDistributionOrderedSetAggregates();
functionFactory.stddevPopSamp();
functionFactory.varPopSamp();
functionFactory.varianceSamp();
}
else {
// Before version 11, the position function requires an explicit code unit
queryEngine.getSqmFunctionRegistry().register(
"position",
new DB2PositionFunction( queryEngine.getTypeConfiguration() )
);
// Before version 11, the overlay function requires an explicit code unit
functionFactory.overlayLength_overlay( true );
// ordered set aggregate functions are only available as of version 11, and we can't reasonably emulate them
// so no percent_rank, cume_dist, median, mode, percentile_cont or percentile_disc
queryEngine.getSqmFunctionRegistry().registerAlternateKey( "stddev_pop", "stddev" );
functionFactory.stddevSamp_sumCount();
queryEngine.getSqmFunctionRegistry().registerAlternateKey( "var_pop", "variance" );
functionFactory.varSamp_sumCount();
}
functionFactory.addYearsMonthsDaysHoursMinutesSeconds();
functionFactory.yearsMonthsDaysHoursMinutesSecondsBetween();
functionFactory.dateTrunc();
@ -316,10 +340,6 @@ public class DB2Dialect extends Dialect {
functionFactory.windowFunctions();
functionFactory.listagg( null );
if ( getDB2Version().isSameOrAfter( 11, 1 ) ) {
functionFactory.inverseDistributionOrderedSetAggregates();
functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
}
}
@Override
@ -342,55 +362,96 @@ public class DB2Dialect extends Dialect {
@Override
public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
if ( getDB2Version().isBefore( 11 ) ) {
return timestampdiffPatternV10( unit, fromTemporalType, toTemporalType );
}
StringBuilder pattern = new StringBuilder();
boolean castFrom = fromTemporalType != TemporalType.TIMESTAMP && !unit.isDateUnit();
boolean castTo = toTemporalType != TemporalType.TIMESTAMP && !unit.isDateUnit();
switch (unit) {
switch ( unit ) {
case NATIVE:
case NANOSECOND:
pattern.append("(seconds_between(");
pattern.append( "(seconds_between(" );
break;
//note: DB2 does have weeks_between()
case MONTH:
case QUARTER:
// the months_between() function results
// in a non-integral value, so trunc() it
pattern.append("trunc(months_between(");
pattern.append( "trunc(months_between(" );
break;
default:
pattern.append("?1s_between(");
pattern.append( "?1s_between(" );
}
if (castTo) {
pattern.append("cast(?3 as timestamp)");
if ( castTo ) {
pattern.append( "cast(?3 as timestamp)" );
}
else {
pattern.append("?3");
pattern.append( "?3" );
}
pattern.append(",");
if (castFrom) {
pattern.append("cast(?2 as timestamp)");
pattern.append( ',' );
if ( castFrom ) {
pattern.append( "cast(?2 as timestamp)" );
}
else {
pattern.append("?2");
pattern.append( "?2" );
}
pattern.append(")");
switch (unit) {
pattern.append( ')' );
switch ( unit ) {
case NATIVE:
pattern.append("+(microsecond(?3)-microsecond(?2))/1e6)");
pattern.append( "+(microsecond(?3)-microsecond(?2))/1e6)" );
break;
case NANOSECOND:
pattern.append("*1e9+(microsecond(?3)-microsecond(?2))*1e3)");
pattern.append( "*1e9+(microsecond(?3)-microsecond(?2))*1e3)" );
break;
case MONTH:
pattern.append(")");
pattern.append( ')' );
break;
case QUARTER:
pattern.append("/3)");
pattern.append( "/3)" );
break;
}
return pattern.toString();
}
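/**
 * {@code timestampdiff()} emulation for DB2 versions before 11, which lack the {@code *_between()}
 * functions: sub-day units are derived from {@code days()}, {@code midnight_seconds()} and
 * {@code microsecond()} via a lateral values clause, date units from {@code year()},
 * {@code months_between()} and {@code days()}.
 */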
public static String timestampdiffPatternV10(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
final boolean castFrom = fromTemporalType != TemporalType.TIMESTAMP && !unit.isDateUnit();
final boolean castTo = toTemporalType != TemporalType.TIMESTAMP && !unit.isDateUnit();
final String fromExpression = castFrom ? "cast(?2 as timestamp)" : "?2";
final String toExpression = castTo ? "cast(?3 as timestamp)" : "?3";
switch ( unit ) {
case NATIVE:
return "(select (days(t2)-days(t1))*86400+(midnight_seconds(t2)-midnight_seconds(t1))+(microsecond(t2)-microsecond(t1))/1e6 " +
"from lateral(values(" + fromExpression + ',' + toExpression + ")) as temp(t1,t2))";
case NANOSECOND:
return "(select (days(t2)-days(t1))*86400+(midnight_seconds(t2)-midnight_seconds(t1))*1e9+(microsecond(t2)-microsecond(t1))*1e3 " +
"from lateral(values(" + fromExpression + ',' + toExpression + ")) as temp(t1,t2))";
case SECOND:
return "(select (days(t2)-days(t1))*86400+(midnight_seconds(t2)-midnight_seconds(t1)) " +
"from lateral(values(" + fromExpression + ',' + toExpression + ")) as temp(t1,t2))";
case MINUTE:
return "(select (days(t2)-days(t1))*1440+(midnight_seconds(t2)-midnight_seconds(t1))/60 from " +
"lateral(values(" + fromExpression + ',' + toExpression + ")) as temp(t1,t2))";
case HOUR:
return "(select (days(t2)-days(t1))*24+(midnight_seconds(t2)-midnight_seconds(t1))/3600 " +
"from lateral(values(" + fromExpression + ',' + toExpression + ")) as temp(t1,t2))";
case YEAR:
return "(year(" + toExpression + ")-year(" + fromExpression + "))";
// the months_between() function results
// in a non-integral value, so trunc() it
case MONTH:
return "trunc(months_between(" + toExpression + ',' + fromExpression + "))";
case QUARTER:
return "trunc(months_between(" + toExpression + ',' + fromExpression + ")/3)";
case WEEK:
return "int((days" + toExpression + ")-days(" + fromExpression + "))/7)";
case DAY:
return "(days(" + toExpression + ")-days(" + fromExpression + "))";
default:
throw new UnsupportedOperationException( "Unsupported unit: " + unit );
}
}
@Override
public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType, IntervalType intervalType) {
final StringBuilder pattern = new StringBuilder();
@ -665,7 +726,13 @@ public class DB2Dialect extends Dialect {
@Override
public void appendBinaryLiteral(SqlAppender appender, byte[] bytes) {
appender.appendSql( "BX'" );
if ( getDB2Version().isSameOrAfter( 11 ) ) {
appender.appendSql( "BX'" );
}
else {
// This should be fine on DB2 prior to 11
appender.appendSql( "X'" );
}
PrimitiveByteArrayJavaType.INSTANCE.appendString( appender, bytes );
appender.appendSql( '\'' );
}
@ -784,13 +851,18 @@ public class DB2Dialect extends Dialect {
@Override
public String extractPattern(TemporalUnit unit) {
if ( unit == TemporalUnit.WEEK ) {
// Not sure why, but `extract(week from '2019-05-27')` wrongly returns 21 and week_iso behaves correctly
return "week_iso(?2)";
}
else {
return super.extractPattern( unit );
switch ( unit ) {
case WEEK:
// Not sure why, but `extract(week from '2019-05-27')` wrongly returns 21 and week_iso behaves correctly
return "week_iso(?2)";
case DAY_OF_YEAR:
return "dayofyear(?2)";
case DAY_OF_WEEK:
return "dayofweek(?2)";
case QUARTER:
return "quarter(?2)";
}
return super.extractPattern( unit );
}
@Override


@ -29,11 +29,14 @@ import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableReferenceJoin;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.insert.InsertStatement;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.ast.tree.update.UpdateStatement;
import org.hibernate.sql.exec.spi.JdbcOperation;
@ -44,6 +47,9 @@ import org.hibernate.sql.exec.spi.JdbcOperation;
*/
public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAstTranslator<T> {
// We have to track whether we are in a lateral query for applying lateral during window emulation
private boolean inLateral;
public DB2SqlAstTranslator(SessionFactoryImplementor sessionFactory, Statement statement) {
super( sessionFactory, statement );
}
@ -114,7 +120,12 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
@Override
protected void renderExpressionAsClauseItem(Expression expression) {
expression.accept( this );
if ( expression instanceof Predicate && getDB2Version().isBefore( 11 ) ) {
super.renderExpressionAsClauseItem( expression );
}
else {
expression.accept( this );
}
}
@Override
@ -202,24 +213,60 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
}
protected boolean shouldEmulateFetchClause(QueryPart queryPart) {
// Percent fetches or ties fetches aren't supported in DB2
// According to LegacyDB2LimitHandler, variable limit also isn't supported before 11.1
// Check if current query part is already row numbering to avoid infinite recursion
return getQueryPartForRowNumbering() != queryPart && (
useOffsetFetchClause( queryPart ) && !isRowsOnlyFetchClauseType( queryPart )
|| getDB2Version().isBefore( 11, 1 ) && ( queryPart.isRoot() && hasLimit() || !( queryPart.getFetchClauseExpression() instanceof Literal ) )
);
if ( getQueryPartForRowNumbering() == queryPart ) {
return false;
}
// Percent fetches or ties fetches aren't supported in DB2
if ( useOffsetFetchClause( queryPart ) && !isRowsOnlyFetchClauseType( queryPart ) ) {
return true;
}
// According to LegacyDB2LimitHandler, variable limit also isn't supported before 11.1
return getDB2Version().isBefore( 11, 1 )
&& queryPart.getFetchClauseExpression() != null
&& !( queryPart.getFetchClauseExpression() instanceof Literal );
}
protected boolean supportsOffsetClause() {
return getDB2Version().isSameOrAfter( 11, 1 );
}
@Override
public void visitQueryPartTableReference(QueryPartTableReference tableReference) {
final boolean oldLateral = inLateral;
inLateral = tableReference.isLateral();
super.visitQueryPartTableReference( tableReference );
inLateral = oldLateral;
}
@Override
public void visitSelectStatement(SelectStatement statement) {
if ( getQueryPartForRowNumbering() == statement.getQueryPart() && inLateral ) {
appendSql( "lateral " );
}
super.visitSelectStatement( statement );
}
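// When the offset/fetch emulation wraps the query part, emit the lateral keyword once for the
// wrapper and clear the flag so the nested select doesn't emit it a second time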
@Override
protected void emulateFetchOffsetWithWindowFunctionsVisitQueryPart(QueryPart queryPart) {
if ( inLateral ) {
appendSql( "lateral " );
final boolean oldLateral = inLateral;
inLateral = false;
super.emulateFetchOffsetWithWindowFunctionsVisitQueryPart( queryPart );
inLateral = oldLateral;
}
else {
super.emulateFetchOffsetWithWindowFunctionsVisitQueryPart( queryPart );
}
}
@Override
public void visitQueryGroup(QueryGroup queryGroup) {
final boolean emulateFetchClause = shouldEmulateFetchClause( queryGroup );
if ( emulateFetchClause || !supportsOffsetClause() && hasOffset( queryGroup ) ) {
emulateFetchOffsetWithWindowFunctions( queryGroup, emulateFetchClause );
if ( emulateFetchClause ||
getQueryPartForRowNumbering() != queryGroup && !supportsOffsetClause() && hasOffset( queryGroup ) ) {
emulateFetchOffsetWithWindowFunctions( queryGroup, true );
}
else {
super.visitQueryGroup( queryGroup );
@ -229,8 +276,9 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
@Override
public void visitQuerySpec(QuerySpec querySpec) {
final boolean emulateFetchClause = shouldEmulateFetchClause( querySpec );
if ( emulateFetchClause || !supportsOffsetClause() && hasOffset( querySpec ) ) {
emulateFetchOffsetWithWindowFunctions( querySpec, emulateFetchClause );
if ( emulateFetchClause ||
getQueryPartForRowNumbering() != querySpec && !supportsOffsetClause() && hasOffset( querySpec ) ) {
emulateFetchOffsetWithWindowFunctions( querySpec, true );
}
else {
super.visitQuerySpec( querySpec );
@ -252,6 +300,26 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
}
}
@Override
protected void renderOffsetExpression(Expression offsetExpression) {
if ( supportsParameterOffsetFetchExpression() ) {
super.renderOffsetExpression( offsetExpression );
}
else {
renderExpressionAsLiteral( offsetExpression, getJdbcParameterBindings() );
}
}
@Override
protected void renderFetchExpression(Expression fetchExpression) {
if ( supportsParameterOffsetFetchExpression() ) {
super.renderFetchExpression( fetchExpression );
}
else {
renderExpressionAsLiteral( fetchExpression, getJdbcParameterBindings() );
}
}
@Override
protected void visitDeleteStatementOnly(DeleteStatement statement) {
final boolean closeWrapper = renderReturningClause( statement );
@ -375,4 +443,8 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
return this.getDialect().getVersion();
}
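// DB2 versions before 11 apparently don't accept parameter markers in the offset/fetch clause,
// so renderOffsetExpression()/renderFetchExpression() above fall back to rendering literals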
protected boolean supportsParameterOffsetFetchExpression() {
return getDB2Version().isSameOrAfter( 11 );
}
}


@ -852,7 +852,7 @@ public abstract class Dialect implements ConversionContext {
//supported on one database, but can be emulated using sum() and case,
//though there is a more natural mapping on some databases
functionFactory.everyAny_sumCase();
functionFactory.everyAny_sumCase( supportsPredicateAsExpression() );
//math functions supported on almost every database
@ -3900,6 +3900,14 @@ public abstract class Dialect implements ConversionContext {
return true;
}
/**
* Whether a predicate like `a > 0` can appear in an expression context e.g. a select item list.
*/
protected boolean supportsPredicateAsExpression() {
// Most databases seem to allow that
return true;
}
public void appendBinaryLiteral(SqlAppender appender, byte[] bytes) {
appender.appendSql( "X'" );
PrimitiveByteArrayJavaType.INSTANCE.appendString( appender, bytes );


@ -254,6 +254,7 @@ public class H2Dialect extends Dialect {
functionFactory.radians();
functionFactory.degrees();
functionFactory.log10();
functionFactory.mod_operator();
functionFactory.rand();
functionFactory.truncate();
functionFactory.soundex();


@ -230,6 +230,12 @@ public class H2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAstT
return true;
}
@Override
protected boolean supportsRowValueConstructorDistinctFromSyntax() {
// Row value constructor 'distinct from' syntax seems to have been buggy before 1.4.200
return getDialect().getVersion().isSameOrAfter( 1, 4, 200 );
}
@Override
protected boolean supportsNullPrecedence() {
// Support for nulls clause in listagg was added in 2.0


@ -125,26 +125,6 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
final BinaryArithmeticOperator operator = arithmeticExpression.getOperator();
if ( operator == BinaryArithmeticOperator.MODULO ) {
append( "mod" );
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( ',' );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
else {
appendSql( OPEN_PARENTHESIS );
render( arithmeticExpression.getLeftHandOperand(), SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
render( arithmeticExpression.getRightHandOperand(), SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
appendSql( CLOSE_PARENTHESIS );
}
}
@Override
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;


@ -80,6 +80,25 @@ public class MariaDBSqlAstTranslator<T extends JdbcOperation> extends AbstractSq
return useOffsetFetchClause( queryPart ) && getQueryPartForRowNumbering() != queryPart && supportsWindowFunctions() && !isRowsOnlyFetchClauseType( queryPart );
}
@Override
protected boolean supportsSimpleQueryGrouping() {
return getDialect().getVersion().isSameOrAfter( 10, 4 );
}
@Override
protected boolean shouldEmulateLateralWithIntersect(QueryPart queryPart) {
// Intersect emulation requires nested correlation when no simple query grouping is possible
// and the query has an offset/fetch clause, so we have to disable the emulation in this case,
// because nested correlation is not supported
return supportsSimpleQueryGrouping() || !queryPart.hasOffsetOrFetchClause();
}
@Override
protected boolean supportsNestedSubqueryCorrelation() {
// Nested subquery correlation doesn't seem to be supported
return false;
}
@Override
public void visitQueryGroup(QueryGroup queryGroup) {
if ( shouldEmulateFetchClause( queryGroup ) ) {


@ -147,6 +147,19 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
return getQueryPartStack().findCurrentFirst( part -> part instanceof QueryGroup ? part : null ) != null;
}
@Override
protected boolean shouldEmulateLateralWithIntersect(QueryPart queryPart) {
// On Oracle 11 where there is no lateral support,
// make sure we don't use intersect if the query has an offset/fetch clause
return !queryPart.hasOffsetOrFetchClause();
}
@Override
protected boolean supportsNestedSubqueryCorrelation() {
// Nested subquery correlation doesn't seem to be supported, at least on version 11
return false;
}
protected boolean shouldEmulateFetchClause(QueryPart queryPart) {
// Check if current query part is already row numbering to avoid infinite recursion
if (getQueryPartForRowNumbering() == queryPart) {
@ -200,22 +213,8 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
querySpec,
true, // we need select aliases to avoid ORA-00918: column ambiguously defined
() -> {
final QueryPart currentQueryPart = getQueryPartStack().getCurrent();
final boolean needsWrapper;
if ( currentQueryPart instanceof QueryGroup ) {
// visitQuerySpec will add the select wrapper
needsWrapper = !currentQueryPart.hasOffsetOrFetchClause();
}
else {
needsWrapper = true;
}
if ( needsWrapper ) {
appendSql( "select * from (" );
}
super.visitQuerySpec( querySpec );
if ( needsWrapper ) {
appendSql( ')' );
}
appendSql( "select * from " );
emulateFetchOffsetWithWindowFunctionsVisitQueryPart( querySpec );
appendSql( " where rownum<=" );
final Stack<Clause> clauseStack = getClauseStack();
clauseStack.push( Clause.WHERE );
@ -488,21 +487,4 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
return getDialect().supportsFetchClause( FetchClauseType.ROWS_ONLY );
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
final BinaryArithmeticOperator operator = arithmeticExpression.getOperator();
if ( operator == BinaryArithmeticOperator.MODULO ) {
append( "mod" );
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( ',' );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
return;
}
else {
super.visitBinaryArithmeticExpression( arithmeticExpression );
}
}
}


@ -508,7 +508,8 @@ public class PostgreSQLDialect extends Dialect {
functionFactory.degrees();
functionFactory.trunc();
functionFactory.log();
if ( getVersion().isSameOrAfter(12) ) {
functionFactory.mod_operator();
if ( getVersion().isSameOrAfter( 12 ) ) {
functionFactory.log10();
functionFactory.tanh();
functionFactory.sinh();


@ -12,6 +12,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteMaterialization;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
@ -212,4 +213,13 @@ public class PostgreSQLSqlAstTranslator<T extends JdbcOperation> extends Abstrac
}
}
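// Parenthesize binary arithmetic defensively; presumably this keeps operator precedence
// unambiguous now that the dialect renders mod via the % operator and expressions may be nested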
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
}


@ -14,7 +14,7 @@ package org.hibernate.dialect;
public class SQLServer2016Dialect extends SQLServerDialect {
public SQLServer2016Dialect() {
super( DatabaseVersion.make( 16 ) );
super( DatabaseVersion.make( 13 ) );
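// SQL Server 2016 reports major version 13 (2017 is 14, 2019 is 15, 2022 is 16)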
}
@Override


@ -38,6 +38,7 @@ import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.TrimSpec;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
@ -327,6 +328,31 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
if ( getVersion().isSameOrAfter( 14 ) ) {
functionFactory.listagg_stringAggWithinGroup( "varchar(max)" );
}
if ( getVersion().isSameOrAfter( 16 ) ) {
functionFactory.leastGreatest();
}
}
@Override
public String trimPattern(TrimSpec specification, char character) {
if ( getVersion().isSameOrAfter( 16 ) ) {
switch ( specification ) {
case BOTH:
return character == ' '
? "trim(?1)"
: "trim('" + character + "' from ?1)";
case LEADING:
return character == ' '
? "ltrim(?1)"
: "ltrim(?1,'" + character + "')";
case TRAILING:
return character == ' '
? "rtrim(?1)"
: "rtrim(?1,'" + character + "')";
}
throw new UnsupportedOperationException( "Unsupported specification: " + specification );
}
return super.trimPattern( specification, character );
}
@Override
@ -412,6 +438,11 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
return true;
}
@Override
public boolean supportsDistinctFromPredicate() {
return getVersion().isSameOrAfter( 16 );
}
@Override
public char closeQuote() {
return ']';
@ -900,7 +931,7 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
@Override
public String[] getDropSchemaCommand(String schemaName) {
if ( getVersion().isSameOrAfter( 16 ) ) {
if ( getVersion().isSameOrAfter( 13 ) ) {
return new String[] { "drop schema if exists " + schemaName };
}
return super.getDropSchemaCommand( schemaName );


@ -10,6 +10,7 @@ import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.sqm.ComparisonOperator;
@ -19,6 +20,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
@ -396,6 +398,15 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends Abstract
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
@Override
protected boolean supportsRowValueConstructorSyntax() {
return false;


@ -18,6 +18,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
@ -311,6 +312,15 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
@Override
public void visitColumnReference(ColumnReference columnReference) {
final String dmlTargetTableAlias = getDmlTargetTableAlias();


@ -17,6 +17,7 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
@ -148,6 +149,15 @@ public class SybaseSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
@Override
protected boolean supportsRowValueConstructorSyntax() {
return false;


@ -435,9 +435,33 @@ public class CommonFunctionFactory {
*/
public void varianceSamp() {
functionRegistry.namedAggregateDescriptorBuilder( "variance_samp" )
.setInvariantType(doubleType)
.setInvariantType( doubleType )
.setExactArgumentCount( 1 )
.setParameterTypes(NUMERIC)
.setParameterTypes( NUMERIC )
.register();
}
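// Sample variance via the sum-of-squares identity (sum(x^2)-sum(x)^2/count(x))/(count(x)-1),
// with nullif() yielding null instead of dividing by zero when count(x)=1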
private static final String VAR_SAMP_SUM_COUNT_PATTERN = "(sum(power(?1,2))-(power(sum(?1),2)/count(?1)))/nullif(count(?1)-1,0)";
/**
* DB2 before 11
*/
public void varSamp_sumCount() {
functionRegistry.patternAggregateDescriptorBuilder( "var_samp", VAR_SAMP_SUM_COUNT_PATTERN )
.setInvariantType( doubleType )
.setExactArgumentCount( 1 )
.setParameterTypes( NUMERIC )
.register();
}
/**
* DB2 before 11
*/
public void stddevSamp_sumCount() {
functionRegistry.patternAggregateDescriptorBuilder( "stddev_samp", "sqrt(" + VAR_SAMP_SUM_COUNT_PATTERN + ")" )
.setInvariantType( doubleType )
.setExactArgumentCount( 1 )
.setParameterTypes( NUMERIC )
.register();
}
@ -933,11 +957,11 @@ public class CommonFunctionFactory {
* for databases that have to emulate the boolean
* aggregation functions using sum() and case.
*/
public void everyAny_sumCase() {
public void everyAny_sumCase(boolean supportsPredicateAsExpression) {
functionRegistry.register( "every",
new EveryAnyEmulation( typeConfiguration, true ) );
new EveryAnyEmulation( typeConfiguration, true, supportsPredicateAsExpression ) );
functionRegistry.register( "any",
new EveryAnyEmulation( typeConfiguration, false ) );
new EveryAnyEmulation( typeConfiguration, false, supportsPredicateAsExpression ) );
}
/**
@ -1691,14 +1715,13 @@ public class CommonFunctionFactory {
/**
* For DB2 which has a broken implementation of overlay()
*/
public void overlayCharacterLength_overlay() {
public void overlayLength_overlay(boolean withCodeUnits) {
final String codeUnits = withCodeUnits ? " using codeunits32" : "";
functionRegistry.registerTernaryQuaternaryPattern(
"overlay",
stringType,
//use character_length() here instead of length()
//because DB2 doesn't like "length(?)"
"overlay(?1 placing ?2 from ?3 for character_length(?2))",
"overlay(?1 placing ?2 from ?3 for ?4)",
"overlay(?1 placing ?2 from ?3 for character_length(?2" + (withCodeUnits ? ",codeunits32" : "") + ")" + codeUnits + ")",
"overlay(?1 placing ?2 from ?3 for ?4" + codeUnits + ")",
STRING, STRING, INTEGER, INTEGER,
typeConfiguration
)


@ -0,0 +1,62 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function;
import java.util.List;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.produce.function.ArgumentTypesValidator;
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
import org.hibernate.query.sqm.produce.function.StandardFunctionReturnTypeResolvers;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.ANY;
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.STRING;
/**
* DB2's position() function always requires a code unit before version 11.
*/
public class DB2PositionFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
public DB2PositionFunction(TypeConfiguration typeConfiguration) {
super(
"position",
new ArgumentTypesValidator( StandardArgumentsValidators.between( 2, 3 ), STRING, STRING, ANY ),
StandardFunctionReturnTypeResolvers.invariant( typeConfiguration.getBasicTypeRegistry().resolve(
StandardBasicTypes.INTEGER ) ),
StandardFunctionArgumentTypeResolvers.invariant( typeConfiguration, STRING, STRING )
);
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> arguments,
SqlAstTranslator<?> walker) {
final int argumentCount = arguments.size();
sqlAppender.appendSql( "position(" );
arguments.get( 0 ).accept( walker );
for ( int i = 1; i < argumentCount; i++ ) {
sqlAppender.appendSql( ',' );
arguments.get( i ).accept( walker );
}
if ( argumentCount != 3 ) {
sqlAppender.appendSql( ",codeunits32" );
}
sqlAppender.appendSql( ')' );
}
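// e.g. a two-argument call position(x,y) renders as "position(x,y,codeunits32)";
// an explicit third argument is passed through unchanged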
@Override
public String getSignature(String name) {
return "(STRING pattern in STRING string[, units]])";
}
}


@ -0,0 +1,64 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function;
import java.util.List;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.produce.function.ArgumentTypesValidator;
import org.hibernate.query.sqm.produce.function.FunctionParameterType;
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
import org.hibernate.query.sqm.produce.function.StandardFunctionReturnTypeResolvers;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.INTEGER;
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.STRING;
/**
* DB2's substring() function requires a code unit argument, while substr() can't take one,
* so we always render substring() and default the code unit to codeunits32 when it isn't passed explicitly.
*/
public class DB2SubstringFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
public DB2SubstringFunction(TypeConfiguration typeConfiguration) {
super(
"substring",
new ArgumentTypesValidator( StandardArgumentsValidators.between( 2, 4 ), STRING, INTEGER, INTEGER, FunctionParameterType.ANY ),
StandardFunctionReturnTypeResolvers.invariant( typeConfiguration.getBasicTypeRegistry().resolve(
StandardBasicTypes.STRING ) ),
StandardFunctionArgumentTypeResolvers.invariant( typeConfiguration, STRING, INTEGER, INTEGER )
);
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> arguments,
SqlAstTranslator<?> walker) {
final int argumentCount = arguments.size();
sqlAppender.appendSql( "substring(" );
arguments.get( 0 ).accept( walker );
for ( int i = 1; i < argumentCount; i++ ) {
sqlAppender.appendSql( ',' );
arguments.get( i ).accept( walker );
}
if ( argumentCount != 4 ) {
sqlAppender.appendSql( ",codeunits32" );
}
sqlAppender.appendSql( ')' );
}
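// e.g. substring(s,2,5) renders as "substring(s,2,5,codeunits32)";
// an explicitly passed fourth argument is used as the code unit instead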
@Override
public String getSignature(String name) {
return "(STRING string, INTEGER start[, INTEGER length[, units]])";
}
}


@ -18,7 +18,9 @@ import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.QueryLiteral;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.type.BasicType;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.spi.TypeConfiguration;
@ -37,8 +39,10 @@ import static org.hibernate.query.sqm.produce.function.FunctionParameterType.BOO
public class EveryAnyEmulation extends AbstractSqmSelfRenderingFunctionDescriptor {
private final boolean every;
private final QueryLiteral<Boolean> trueLiteral;
private final QueryLiteral<Boolean> falseLiteral;
public EveryAnyEmulation(TypeConfiguration typeConfiguration, boolean every) {
public EveryAnyEmulation(TypeConfiguration typeConfiguration, boolean every, boolean supportsPredicateAsExpression) {
super(
every ? "every" : "any",
FunctionKind.AGGREGATE,
@ -49,6 +53,16 @@ public class EveryAnyEmulation extends AbstractSqmSelfRenderingFunctionDescripto
StandardFunctionArgumentTypeResolvers.invariant( typeConfiguration, BOOLEAN )
);
this.every = every;
if ( supportsPredicateAsExpression ) {
this.trueLiteral = null;
this.falseLiteral = null;
}
else {
final BasicType<Boolean> booleanBasicType = typeConfiguration.getBasicTypeRegistry()
.resolve( StandardBasicTypes.BOOLEAN );
this.trueLiteral = new QueryLiteral<>( true, booleanBasicType );
this.falseLiteral = new QueryLiteral<>( false, booleanBasicType );
}
}
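// Where a bare predicate can't be used as an expression, the render methods below wrap the
// emulated sum()/case comparison in a searched case yielding the dialect's boolean literals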
@Override
@ -57,6 +71,9 @@ public class EveryAnyEmulation extends AbstractSqmSelfRenderingFunctionDescripto
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
SqlAstTranslator<?> walker) {
if ( trueLiteral != null ) {
sqlAppender.appendSql( "case when " );
}
sqlAppender.appendSql( "(sum(case when " );
if ( filter != null ) {
walker.getCurrentClauseStack().push( Clause.WHERE );
@ -80,6 +97,13 @@ public class EveryAnyEmulation extends AbstractSqmSelfRenderingFunctionDescripto
sqlAppender.appendSql( " then 1 else 0 end)>0)" );
}
}
if ( trueLiteral != null ) {
sqlAppender.appendSql( " then " );
walker.visitQueryLiteral( trueLiteral );
sqlAppender.appendSql( " else " );
walker.visitQueryLiteral( falseLiteral );
sqlAppender.appendSql( " end" );
}
}
@Override


@ -18,7 +18,7 @@ public class OrderingSpecification implements Node {
private final OrderingExpression orderingExpression;
private String collation;
private SortOrder sortOrder;
private SortOrder sortOrder = SortOrder.ASCENDING;
private NullPrecedence nullPrecedence = NullPrecedence.NONE;
private String orderByValue;

View File

@ -1598,7 +1598,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
nextIndex++;
}
else {
sortOrder = null;
sortOrder = SortOrder.ASCENDING;
}
parseTree = ctx.getChild( nextIndex );
if ( parseTree instanceof HqlParser.NullsPrecedenceContext ) {
@ -1614,12 +1614,12 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
}
}
else {
nullPrecedence = null;
nullPrecedence = NullPrecedence.NONE;
}
}
else {
sortOrder = null;
nullPrecedence = null;
sortOrder = SortOrder.ASCENDING;
nullPrecedence = NullPrecedence.NONE;
}
return new SqmSortSpecification( sortExpression, sortOrder, nullPrecedence );

View File

@ -187,7 +187,7 @@ public class ArgumentTypesValidator implements ArgumentsValidator {
case BOOLEAN:
// ugh, need to be careful here, need to accept all the
// JDBC type codes that a Dialect might use for BOOLEAN
if ( code != BOOLEAN && code != BIT && code != TINYINT ) {
if ( code != BOOLEAN && code != BIT && code != TINYINT && code != SMALLINT ) {
throwError(type, javaType, functionName, count);
}
break;
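
A small hypothetical check (not part of the patch) showing the effect of the change above: SMALLINT now counts as an acceptable JDBC type code for BOOLEAN arguments, presumably because some dialects (older DB2, for instance) map boolean columns to SMALLINT.

import java.sql.Types;

public class BooleanTypeCodeCheck {
	static boolean isAcceptableBooleanCode(int code) {
		return code == Types.BOOLEAN || code == Types.BIT || code == Types.TINYINT || code == Types.SMALLINT;
	}

	public static void main(String[] args) {
		System.out.println( isAcceptableBooleanCode( Types.SMALLINT ) ); // true after this change
	}
}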

View File

@ -26,17 +26,20 @@ public class SqmSortSpecification implements JpaOrder {
SqmExpression sortExpression,
SortOrder sortOrder,
NullPrecedence nullPrecedence) {
assert sortExpression != null;
assert sortOrder != null;
assert nullPrecedence != null;
this.sortExpression = sortExpression;
this.sortOrder = sortOrder;
this.nullPrecedence = nullPrecedence;
}
public SqmSortSpecification(SqmExpression sortExpression) {
this( sortExpression, SortOrder.ASCENDING, null );
this( sortExpression, SortOrder.ASCENDING, NullPrecedence.NONE );
}
public SqmSortSpecification(SqmExpression sortExpression, SortOrder sortOrder) {
this( sortExpression, sortOrder, null );
this( sortExpression, sortOrder, NullPrecedence.NONE );
}
public SqmSortSpecification copy(SqmCopyContext context) {

View File

@ -2846,7 +2846,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
needsParenthesis = !needsRowNumberingWrapper && !needsQueryGroupWrapper;
}
else {
needsParenthesis = !queryGroup.isRoot();
needsParenthesis = queryGroup.hasOffsetOrFetchClause() && !queryGroup.isRoot();
}
if ( needsParenthesis ) {
appendSql( OPEN_PARENTHESIS );
@ -2952,9 +2952,12 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
// because of order by precedence in SQL
if ( querySpec.hasOffsetOrFetchClause() ) {
queryGroupAlias = "";
// If the parent is a query group with a fetch clause,
// If the parent is a query group with a fetch clause we must use a select wrapper,
// or if the database does not support simple query grouping, we must use a select wrapper
if ( !supportsSimpleQueryGrouping() || currentQueryPart.hasOffsetOrFetchClause() ) {
if ( ( !supportsSimpleQueryGrouping() || currentQueryPart.hasOffsetOrFetchClause() )
// We can skip it though if this query spec is being row numbered,
// because then we already have a wrapper
&& queryPartForRowNumbering != querySpec ) {
queryGroupAlias = " grp_" + queryGroupAliasCounter + '_';
queryGroupAliasCounter++;
appendSql( "select" );
@ -4326,11 +4329,19 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
appendSql( OPEN_PARENTHESIS );
}
appendSql( "select " );
if ( getClauseStack().isEmpty() && !( getStatement() instanceof InsertStatement ) ) {
// When we emulate a root statement, we don't need to select the select items
// to filter out the row number column we introduce, because we will simply ignore it anyway
if ( getClauseStack().isEmpty() && !( getStatement() instanceof InsertStatement )
// If the query part is a child of a query group, we can't do that,
// since we need the select items to properly align in query group parts
&& !( getCurrentQueryPart() instanceof QueryGroup ) ) {
appendSql( '*' );
}
else {
final int size = queryPart.getFirstQuerySpec().getSelectClause().getSqlSelections().size();
int size = 0;
for ( SqlSelection sqlSelection : queryPart.getFirstQuerySpec().getSelectClause().getSqlSelections() ) {
size += sqlSelection.getExpressionType().getJdbcTypeCount();
}
String separator = "";
for ( int i = 0; i < size; i++ ) {
appendSql( separator );
@ -4341,9 +4352,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
}
appendSql( " from " );
appendSql( OPEN_PARENTHESIS );
queryPart.accept( this );
appendSql( CLOSE_PARENTHESIS );
emulateFetchOffsetWithWindowFunctionsVisitQueryPart( queryPart );
appendSql( WHITESPACE );
appendSql( alias );
appendSql( " where " );
@ -4464,6 +4473,12 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
}
protected void emulateFetchOffsetWithWindowFunctionsVisitQueryPart(QueryPart queryPart) {
appendSql( OPEN_PARENTHESIS );
queryPart.accept( this );
appendSql( CLOSE_PARENTHESIS );
}
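
As context for the new hook, here is a hypothetical sketch (not Hibernate's actual output) of the general shape of the fetch/offset emulation it participates in: the original query part is rendered inside a derived table that also exposes a row_number() column, and the outer query filters on that column.

public class FetchOffsetEmulationSketch {
	// selectList, fromClause and orderBy stand in for the rendered SQL fragments.
	static String emulate(String selectList, String fromClause, String orderBy, int offset, int fetch) {
		return "select " + selectList
				+ " from (select " + selectList + ", row_number() over(order by " + orderBy + ") rn_ " + fromClause + ") r_"
				+ " where r_.rn_ > " + offset + " and r_.rn_ <= " + ( offset + fetch );
	}

	public static void main(String[] args) {
		System.out.println( emulate( "id, name", "from Person", "id", 10, 5 ) );
	}
}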
protected final void withRowNumbering(QueryPart queryPart, boolean needsSelectAliases, Runnable r) {
final QueryPart queryPartForRowNumbering = this.queryPartForRowNumbering;
final int queryPartForRowNumberingClauseDepth = this.queryPartForRowNumberingClauseDepth;
@ -4527,46 +4542,56 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
if ( needsSelectAliases || referenceStrategy == SelectItemReferenceStrategy.ALIAS && hasSelectAliasInGroupByClause() ) {
String separator = NO_SEPARATOR;
if ( columnAliases == null ) {
for ( int i = 0; i < size; i++ ) {
final SqlSelection sqlSelection = sqlSelections.get( i );
appendSql( separator );
if ( selectItemsToInline != null && selectItemsToInline.get( i ) ) {
parameterRenderingMode = SqlAstNodeRenderingMode.INLINE_ALL_PARAMETERS;
}
else {
parameterRenderingMode = defaultRenderingMode;
}
visitSqlSelection( sqlSelection );
parameterRenderingMode = original;
appendSql( " c" );
appendSql( i );
separator = COMA_SEPARATOR;
int offset = 0;
for ( int i = 0; i < size; i++ ) {
final SqlSelection sqlSelection = sqlSelections.get( i );
if ( selectItemsToInline != null && selectItemsToInline.get( i ) ) {
parameterRenderingMode = SqlAstNodeRenderingMode.INLINE_ALL_PARAMETERS;
}
}
else {
int offset = 0;
for ( int i = 0; i < size; i++ ) {
final SqlSelection sqlSelection = sqlSelections.get( i );
else {
parameterRenderingMode = defaultRenderingMode;
}
final Expression expression = sqlSelection.getExpression();
final SqlTuple sqlTuple = SqlTupleContainer.getSqlTuple( expression );
if ( sqlTuple != null ) {
final List<? extends Expression> expressions = sqlTuple.getExpressions();
for ( Expression e : expressions ) {
appendSql( separator );
renderSelectExpression( e );
appendSql( WHITESPACE );
if ( columnAliases == null ) {
appendSql( 'c' );
appendSql( offset );
}
else {
appendSql( columnAliases.get( offset ) );
}
offset++;
separator = COMA_SEPARATOR;
}
}
else {
appendSql( separator );
if ( selectItemsToInline != null && selectItemsToInline.get( i ) ) {
parameterRenderingMode = SqlAstNodeRenderingMode.INLINE_ALL_PARAMETERS;
}
else {
parameterRenderingMode = defaultRenderingMode;
}
offset += visitSqlSelectExpression( sqlSelection.getExpression(), offset, columnAliases );
parameterRenderingMode = original;
renderSelectExpression( expression );
appendSql( WHITESPACE );
appendSql( columnAliases.get( offset - 1 ) );
if ( columnAliases == null ) {
appendSql( 'c' );
appendSql( offset );
}
else {
appendSql( columnAliases.get( offset ) );
}
offset++;
separator = COMA_SEPARATOR;
}
parameterRenderingMode = original;
}
if ( queryPartForRowNumbering != null ) {
renderRowNumberingSelectItems( selectClause, queryPartForRowNumbering );
}
}
else if ( columnAliases == null ) {
else {
assert columnAliases == null;
String separator = NO_SEPARATOR;
for ( int i = 0; i < size; i++ ) {
final SqlSelection sqlSelection = sqlSelections.get( i );
@ -4582,25 +4607,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
separator = COMA_SEPARATOR;
}
}
else {
String separator = NO_SEPARATOR;
int offset = 0;
for ( int i = 0; i < size; i++ ) {
final SqlSelection sqlSelection = sqlSelections.get( i );
appendSql( separator );
if ( selectItemsToInline != null && selectItemsToInline.get( i ) ) {
parameterRenderingMode = SqlAstNodeRenderingMode.INLINE_ALL_PARAMETERS;
}
else {
parameterRenderingMode = defaultRenderingMode;
}
offset += visitSqlSelectExpression( sqlSelection.getExpression(), offset, columnAliases );
appendSql( WHITESPACE );
appendSql( columnAliases.get( offset - 1 ) );
parameterRenderingMode = original;
separator = COMA_SEPARATOR;
}
}
}
protected void renderVirtualSelections(SelectClause selectClause) {
@ -4953,32 +4959,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
}
protected int visitSqlSelectExpression(Expression expression, int offset, List<String> columnAliases) {
final SqlTuple sqlTuple = SqlTupleContainer.getSqlTuple( expression );
if ( sqlTuple != null ) {
boolean isFirst = true;
final List<? extends Expression> expressions = sqlTuple.getExpressions();
int i = 0;
for ( ; i < expressions.size(); i++ ) {
Expression e = expressions.get( i );
if ( isFirst ) {
isFirst = false;
}
else {
appendSql( WHITESPACE );
appendSql( columnAliases.get( offset + i - 1 ) );
appendSql( ',' );
}
renderSelectExpression( e );
}
return i;
}
else {
renderSelectExpression( expression );
return 1;
}
}
protected void renderSelectExpression(Expression expression) {
renderExpressionAsClauseItem( expression );
}
@ -5394,7 +5374,9 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
protected void emulateQueryPartTableReferenceColumnAliasing(QueryPartTableReference tableReference) {
final boolean needsSelectAliases = this.needsSelectAliases;
final List<String> columnAliases = this.columnAliases;
this.needsSelectAliases = true;
this.columnAliases = tableReference.getColumnNames();
if ( tableReference.getQueryPart().isRoot() ) {
appendSql( OPEN_PARENTHESIS );
@ -5404,6 +5386,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
else {
tableReference.getStatement().accept( this );
}
this.needsSelectAliases = needsSelectAliases;
this.columnAliases = columnAliases;
renderTableReferenceIdentificationVariable( tableReference );
}
@ -5590,7 +5573,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
// The following optimization only makes sense if the necessary features are supported natively
if ( ( columnReferences.size() == 1 || supportsRowValueConstructorSyntax() )
&& supportsDistinctFromPredicate() ) {
&& supportsRowValueConstructorDistinctFromSyntax() ) {
// Special case for limit 1 sub-queries to avoid double nested sub-query
// ... x(c) on x.c is not distinct from (... fetch first 1 rows only)
if ( isFetchFirstRowOnly( statement.getQueryPart() ) ) {
@ -5604,7 +5587,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
// Render with exists intersect sub-query if possible as that is shorter and more efficient
// ... x(c) on exists(select x.c intersect ...)
if ( supportsIntersect() ) {
if ( shouldEmulateLateralWithIntersect( statement.getQueryPart() ) ) {
final QuerySpec lhsReferencesQuery = new QuerySpec( false );
for ( ColumnReference columnReference : columnReferences ) {
lhsReferencesQuery.getSelectClause().addSqlSelection(
@ -5698,7 +5681,14 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
final List<Expression> subExpressions = new ArrayList<>( columnNames.size() );
for ( SqlSelection sqlSelection : querySpec.getSelectClause().getSqlSelections() ) {
subExpressions.add( sqlSelection.getExpression() );
final Expression selectionExpression = sqlSelection.getExpression();
final SqlTuple sqlTuple = SqlTupleContainer.getSqlTuple( selectionExpression );
if ( sqlTuple == null ) {
subExpressions.add( selectionExpression );
}
else {
subExpressions.addAll( sqlTuple.getExpressions() );
}
}
final QuerySpec existsQuery = new QuerySpec( false, querySpec.getFromClause().getRoots().size() );
existsQuery.getFromClause().getRoots().addAll( querySpec.getFromClause().getRoots() );
@ -5804,11 +5794,11 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
);
}
final ComparisonOperator comparisonOperator;
if ( sortSpecification.getSortOrder() == SortOrder.ASCENDING ) {
comparisonOperator = ComparisonOperator.LESS_THAN_OR_EQUAL;
if ( sortSpecification.getSortOrder() == SortOrder.DESCENDING ) {
comparisonOperator = ComparisonOperator.GREATER_THAN_OR_EQUAL;
}
else {
comparisonOperator = ComparisonOperator.GREATER_THAN_OR_EQUAL;
comparisonOperator = ComparisonOperator.LESS_THAN_OR_EQUAL;
}
countQuery.applyPredicate(
new Junction(
@ -5868,6 +5858,10 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
return null;
}
protected boolean shouldEmulateLateralWithIntersect(QueryPart queryPart) {
return supportsIntersect();
}
private boolean isNullsFirst(SortSpecification sortSpecification) {
NullPrecedence nullPrecedence = sortSpecification.getNullPrecedence();
if ( nullPrecedence == null || nullPrecedence == NullPrecedence.NONE ) {
@ -5966,7 +5960,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
private boolean needsLateralSortExpressionVirtualSelections(QuerySpec querySpec) {
return !( ( querySpec.getSelectClause().getSqlSelections().size() == 1 || supportsRowValueConstructorSyntax() ) && supportsDistinctFromPredicate() && isFetchFirstRowOnly( querySpec ) )
&& !supportsIntersect()
&& !shouldEmulateLateralWithIntersect( querySpec )
&& !supportsNestedSubqueryCorrelation()
&& querySpec.hasOffsetOrFetchClause();
}
@ -6273,11 +6267,22 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
final BinaryArithmeticOperator operator = arithmeticExpression.getOperator();
if ( operator == BinaryArithmeticOperator.MODULO ) {
append( "mod" );
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( ',' );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
else {
appendSql( OPEN_PARENTHESIS );
arithmeticExpression.getLeftHandOperand().accept( this );
appendSql( arithmeticExpression.getOperator().getOperatorSqlTextString() );
arithmeticExpression.getRightHandOperand().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
}
@Override
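
A minimal sketch (hypothetical, mirroring the branch above) of how binary arithmetic is now rendered: the modulo operator becomes a call to the mod() function, while every other operator keeps its infix form.

public class ModuloRenderingSketch {
	static String render(String operator, String lhs, String rhs) {
		if ( "%".equals( operator ) ) {
			return "mod(" + lhs + "," + rhs + ")";
		}
		return "(" + lhs + operator + rhs + ")";
	}

	public static void main(String[] args) {
		System.out.println( render( "%", "e.quantity", "3" ) ); // mod(e.quantity,3)
		System.out.println( render( "+", "e.quantity", "3" ) ); // (e.quantity+3)
	}
}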
@ -7285,6 +7290,9 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
case GREATER_THAN:
case GREATER_THAN_OR_EQUAL:
return !supportsRowValueConstructorGtLtSyntax();
case DISTINCT_FROM:
case NOT_DISTINCT_FROM:
return !supportsRowValueConstructorDistinctFromSyntax();
}
return false;
}
@ -7342,6 +7350,21 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
return supportsRowValueConstructorSyntax();
}
/**
* Is this dialect known to support what ANSI-SQL terms "row value
* constructor" syntax, sometimes called tuple syntax, with the <code>is distinct from</code>
* and <code>is not distinct from</code> operators?
* <p/>
* Basically, does it support syntax like
* "... where (FIRST_NAME, LAST_NAME) is distinct from ('Steve', 'Ebersole') ...".
*
* @return True if this SQL dialect is known to support "row value
* constructor" syntax with distinct from comparison operators; false otherwise.
*/
protected boolean supportsRowValueConstructorDistinctFromSyntax() {
return supportsRowValueConstructorSyntax() && supportsDistinctFromPredicate();
}
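
To make the Javadoc above concrete, a hypothetical sketch (not the translator's actual fallback) of how a tuple "is distinct from" comparison can be expanded column by column when the row value constructor form is unavailable.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class DistinctFromExpansionSketch {
	// (a, b) is distinct from (x, y)  <=>  a is distinct from x or b is distinct from y
	static String expandIsDistinctFrom(List<String> lhs, List<String> rhs) {
		return IntStream.range( 0, lhs.size() )
				.mapToObj( i -> lhs.get( i ) + " is distinct from " + rhs.get( i ) )
				.collect( Collectors.joining( " or ", "(", ")" ) );
	}

	public static void main(String[] args) {
		System.out.println( expandIsDistinctFrom( List.of( "FIRST_NAME", "LAST_NAME" ), List.of( "'Steve'", "'Ebersole'" ) ) );
		// (FIRST_NAME is distinct from 'Steve' or LAST_NAME is distinct from 'Ebersole')
	}
}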
/**
* Is this dialect known to support what ANSI-SQL terms "row value constructor" syntax,
* sometimes called tuple syntax, in the SET clause;

View File

@ -25,6 +25,9 @@ public class SortSpecification implements SqlAstNode {
}
public SortSpecification(Expression sortExpression, SortOrder sortOrder, NullPrecedence nullPrecedence) {
assert sortExpression != null;
assert sortOrder != null;
assert nullPrecedence != null;
this.sortExpression = sortExpression;
this.sortOrder = sortOrder;
this.nullPrecedence = nullPrecedence;

View File

@ -12,6 +12,8 @@ import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.beanvalidation.ValidationMode;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Test;
@ -20,6 +22,7 @@ import org.junit.Test;
* @author Andrea Boriero
*/
@TestForIssue(jiraKey = "HHH-13959")
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class NotNullManyToOneTest extends BaseCoreFunctionalTestCase {
@Override

View File

@ -32,7 +32,7 @@ public class Misc3Test {
@Table(name = "A")
public static final class A {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@GeneratedValue
private Long id;
@Basic
private String name;
@ -42,7 +42,7 @@ public class Misc3Test {
@Table(name = "B", uniqueConstraints = {@UniqueConstraint(columnNames = {"a_id", "uniqueName"})})
public static final class B {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@GeneratedValue
private Long id;
@Basic
@ -56,7 +56,7 @@ public class Misc3Test {
@Table(name = "C", uniqueConstraints = {@UniqueConstraint(columnNames = {"a_id", "uniqueName"})})
public static final class C {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@GeneratedValue
private Long id;
@Basic

View File

@ -121,7 +121,7 @@ public class MySQLDropConstraintThrowsExceptionTest {
.filter(
sql -> sql.toLowerCase().contains( "alter " )
).map( String::trim ).collect( Collectors.toList() );
if ( metadata.getDatabase().getDialect() instanceof MariaDBDialect ) {
if ( metadata.getDatabase().getDialect().supportsIfExistsAfterAlterTable() ) {
assertTrue( alterStatements.get( 0 ).matches( "alter table if exists CUSTOMER\\s+drop index .*?" ) );
assertTrue( alterStatements.get( 1 )
.matches( "alter table if exists CUSTOMER\\s+add constraint .*? unique \\(CUSTOMER_ID\\)" ) );

View File

@ -14,6 +14,8 @@ import org.hibernate.boot.jaxb.spi.Binding;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Test;
@ -21,6 +23,7 @@ import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
@TestForIssue(jiraKey = {"HHH-14530", "HHH-14529"})
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class PreParsedOrmXmlTest extends BaseCoreFunctionalTestCase {
@Override

View File

@ -13,6 +13,8 @@ import java.io.UncheckedIOException;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.jaxb.spi.Binding;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.orm.junit.BaseSessionFactoryFunctionalTest;
import org.junit.jupiter.api.Test;
@ -20,6 +22,7 @@ import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
@TestForIssue(jiraKey = "HHH-14530")
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsIdentityColumns.class)
public class PreParsedHbmXmlTest extends BaseSessionFactoryFunctionalTest {
@Override

View File

@ -17,19 +17,24 @@ import jakarta.persistence.Id;
import jakarta.persistence.OptimisticLockException;
import jakarta.persistence.Version;
import org.hibernate.StaleObjectStateException;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.CockroachDialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import org.junit.Test;
import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* @author Vlad Mihalcea
*/
@SkipForDialect(value = CockroachDialect.class, comment = "See https://hibernate.atlassian.net/browse/HHH-15668")
public class BatchOptimisticLockingTest extends
BaseNonConfigCoreFunctionalTestCase {
@ -101,6 +106,11 @@ public class BatchOptimisticLockingTest extends
expected.getMessage()
);
}
else if ( getDialect() instanceof OracleDialect && getDialect().getVersion().isBefore( 12 ) ) {
assertTrue(
expected.getCause() instanceof StaleObjectStateException
);
}
else {
assertEquals(
"Batch update returned unexpected row count from update [1]; actual row count: 0; expected: 1; statement executed: update Person set name=?, version=? where id=? and version=?",

View File

@ -55,6 +55,8 @@ import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.hibernate.testing.AfterClassOnce;
import org.hibernate.testing.BeforeClassOnce;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.jdbc.SharedDriverManagerConnectionProviderImpl;
import org.hibernate.testing.junit4.CustomParameterized;
@ -82,6 +84,7 @@ import static org.assertj.core.api.Assertions.assertThat;
@RunWith(CustomParameterized.class)
@TestForIssue(jiraKey = { "HHH-14921", "HHH-14922", "HHH-15212" })
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class DefaultCatalogAndSchemaTest {
private static final String SQL_QUOTE_CHARACTER_CLASS = "([`\"]|\\[|\\])";

View File

@ -13,6 +13,8 @@ import java.util.List;
import org.hibernate.boot.SessionFactoryBuilder;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
@ -42,6 +44,7 @@ import static org.junit.Assert.assertFalse;
*/
@TestForIssue(jiraKey = "HHH-14360")
@RunWith(BytecodeEnhancerRunner.class)
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class DirtyTrackingNotInDefaultFetchGroupPersistTest extends BaseNonConfigCoreFunctionalTestCase {
@Override

View File

@ -27,6 +27,8 @@ import jakarta.persistence.TemporalType;
import org.hibernate.boot.SessionFactoryBuilder;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
@ -41,6 +43,7 @@ import static org.junit.Assert.assertTrue;
*/
@TestForIssue(jiraKey = "HHH-14360")
@RunWith(BytecodeEnhancerRunner.class)
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class DirtyTrackingPersistTest extends BaseNonConfigCoreFunctionalTestCase {
@Override

View File

@ -19,6 +19,8 @@ import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.bytecode.enhancement.CustomEnhancementContext;
@ -34,6 +36,7 @@ import static org.junit.Assert.assertThat;
@TestForIssue(jiraKey = "HHH14424")
@RunWith(BytecodeEnhancerRunner.class)
@CustomEnhancementContext({ DirtyCheckEnhancementContext.class, NoDirtyCheckEnhancementContext.class })
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class LoadAndUpdateEntitiesWithCollectionsTest extends BaseNonConfigCoreFunctionalTestCase {
boolean skipTest;

View File

@ -19,6 +19,8 @@ import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.bytecode.enhancement.CustomEnhancementContext;
@ -37,6 +39,7 @@ import static org.junit.Assert.assertThat;
@TestForIssue(jiraKey = "HHH14424")
@RunWith(BytecodeEnhancerRunner.class)
@CustomEnhancementContext({ NoDirtyCheckEnhancementContext.class, DirtyCheckEnhancementContext.class })
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class DynamicUpdateAndCollectionsTest extends BaseNonConfigCoreFunctionalTestCase {
boolean skipTest;

View File

@ -226,7 +226,7 @@ public class CompositeUserTypeTest {
final Transaction t2 = new Transaction();
t2.setDescription( "bar" );
t2.setValue( new MonetoryAmount( new BigDecimal( 1000000 ), Currency.getInstance( "USD" ) ) );
t1.setTimestamp( new CompositeDateTime( 2014, 8, 22, 14, 23, 0 ) );
t2.setTimestamp( new CompositeDateTime( 2014, 8, 22, 14, 23, 0 ) );
session.persist( t2 );
final Transaction t3 = new Transaction();

View File

@ -17,6 +17,8 @@ import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import org.hibernate.testing.transaction.TransactionUtil;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorOracleDatabaseImpl;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
@ -70,33 +72,40 @@ public class OracleDialectSequenceInformationTest extends BaseNonConfigCoreFunct
}
private SequenceInformation fetchSequenceInformation(String sequenceName) throws SQLException {
try ( Connection connection = sessionFactory().getJdbcServices()
.getBootstrapJdbcConnectionAccess()
.obtainConnection() ) {
JdbcEnvironment jdbcEnvironment = sessionFactory().getJdbcServices().getJdbcEnvironment();
SequenceInformationExtractorOracleDatabaseImpl sequenceExtractor = SequenceInformationExtractorOracleDatabaseImpl.INSTANCE;
Iterable<SequenceInformation> sequenceInformations = sequenceExtractor.extractMetadata(
new ExtractionContext.EmptyExtractionContext() {
return TransactionUtil.doWithJDBC(
sessionFactory().getServiceRegistry(),
connection -> {
JdbcEnvironment jdbcEnvironment = sessionFactory().getJdbcServices().getJdbcEnvironment();
SequenceInformationExtractorOracleDatabaseImpl sequenceExtractor = SequenceInformationExtractorOracleDatabaseImpl.INSTANCE;
Iterable<SequenceInformation> sequenceInformations = sequenceExtractor.extractMetadata(
new ExtractionContext.EmptyExtractionContext() {
@Override
public Connection getJdbcConnection() {
return connection;
}
@Override
public Connection getJdbcConnection() {
return connection;
}
@Override
public JdbcEnvironment getJdbcEnvironment() {
return jdbcEnvironment;
}
} );
@Override
public JdbcEnvironment getJdbcEnvironment() {
return jdbcEnvironment;
}
} );
// let's skip system sequences
Optional<SequenceInformation> foundSequence = StreamSupport.stream( sequenceInformations.spliterator(), false )
.filter( sequence -> sequenceName.equals( sequence.getSequenceName().getSequenceName().getText().toUpperCase() ) )
.findFirst();
// let's skip system sequences
Optional<SequenceInformation> foundSequence = StreamSupport.stream(
sequenceInformations.spliterator(),
false
)
.filter( sequence -> sequenceName.equals( sequence.getSequenceName()
.getSequenceName()
.getText()
.toUpperCase() ) )
.findFirst();
assertTrue( sequenceName + " not found", foundSequence.isPresent() );
assertTrue( sequenceName + " not found", foundSequence.isPresent() );
return foundSequence.get();
}
return foundSequence.get();
}
);
}
}

View File

@ -11,6 +11,7 @@ import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
import jakarta.persistence.PersistenceException;
import org.hibernate.QueryException;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.DB2Dialect;
@ -115,8 +116,6 @@ public class DB297SubStringFunctionsTest extends BaseCoreFunctionalTestCase {
@TestForIssue( jiraKey = "HHH-11957")
public void testSubstrWithStringUnits() {
mostRecentStatementInspector.clear();
try {
doInHibernate(
this::sessionFactory, session -> {
@ -129,12 +128,9 @@ public class DB297SubStringFunctionsTest extends BaseCoreFunctionalTestCase {
);
fail( "Should have failed because substr cannot be used with string units." );
}
catch (PersistenceException expected) {
assertTrue( SQLGrammarException.class.isInstance( expected.getCause() ) );
catch (IllegalArgumentException expected) {
assertTrue( QueryException.class.isInstance( expected.getCause() ) );
}
assertTrue( mostRecentStatementInspector.mostRecentSql.contains( "substr(" ) );
assertTrue( mostRecentStatementInspector.mostRecentSql.contains( "octets" ) );
}
@Test

View File

@ -15,8 +15,10 @@ import jakarta.persistence.Table;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.Test;
@ -27,6 +29,7 @@ import org.junit.jupiter.api.Test;
*/
@TestForIssue(jiraKey = "HHH-9983")
@RequiresDialect( OracleDialect.class )
@RequiresDialectFeature( feature = DialectFeatureChecks.SupportsIdentityColumns.class )
@DomainModel( annotatedClasses = SaveEntityTest.Company.class )
@SessionFactory
public class SaveEntityTest {

View File

@ -8,7 +8,9 @@ import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.AfterEach;
@ -35,6 +37,7 @@ import static org.hamcrest.Matchers.notNullValue;
)
@SessionFactory
@TestForIssue(jiraKey = "HHH-15512")
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsIdentityColumns.class)
public class PersistAndQueryingInSameTransactionTest {
@AfterEach

View File

@ -32,11 +32,13 @@ import org.hibernate.dialect.DerbyDialect;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.MariaDBDialect;
import org.hibernate.dialect.MySQLDialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.dialect.PostgresPlusDialect;
import org.hibernate.dialect.SybaseDialect;
import org.hibernate.dialect.TiDBDialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.jpa.boot.internal.EntityManagerFactoryBuilderImpl;
import org.hibernate.jpa.boot.spi.Bootstrap;
@ -51,7 +53,7 @@ import org.hibernate.type.descriptor.jdbc.NumericJdbcType;
import org.hibernate.type.descriptor.jdbc.RealJdbcType;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.CustomRunner;
import org.hibernate.testing.orm.jpa.PersistenceUnitDescriptorAdapter;
@ -97,7 +99,7 @@ public class NativeQueryResultTypeAutoDiscoveryTest {
}
@Test
@SkipForDialect(value=OracleDialect.class, comment="Oracle maps integer types to number")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle maps integer types to number")
public void smallintType() {
createEntityManagerFactory(SmallintEntity.class);
doTest( SmallintEntity.class, (short)32767 );
@ -121,56 +123,61 @@ public class NativeQueryResultTypeAutoDiscoveryTest {
}
@Test
@SkipForDialect(value = SybaseDialect.class, comment = "No support for the bit datatype so we use tinyint")
@SkipForDialect(value = OracleDialect.class, comment = "No support for the bit datatype so we use number(1,0)")
@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "No support for the bit datatype so we use tinyint")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "No support for the bit datatype so we use number(1,0)")
@SkipForDialect(dialectClass = DB2Dialect.class, majorVersion = 10, reason = "No support for the bit datatype so we use smallint")
public void booleanType() {
createEntityManagerFactory( BooleanEntity.class );
doTest( BooleanEntity.class, true );
}
@Test
@SkipForDialect(value = SybaseDialect.class, comment = "No support for the bit datatype so we use tinyint")
@SkipForDialect(value = OracleDialect.class, comment = "No support for the bit datatype so we use number(1,0)")
@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "No support for the bit datatype so we use tinyint")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "No support for the bit datatype so we use number(1,0)")
@SkipForDialect(dialectClass = DB2Dialect.class, majorVersion = 10, reason = "No support for the bit datatype so we use smallint")
public void bitType() {
createEntityManagerFactory( BitEntity.class );
doTest( BitEntity.class, false );
}
@Test
@SkipForDialect(value = PostgreSQLDialect.class, comment = "Turns tinyints into shorts in result sets and advertises the type as short in the metadata")
@SkipForDialect(value = CockroachDialect.class, comment = "Turns tinyints into shorts in result sets and advertises the type as short in the metadata")
@SkipForDialect(value = DerbyDialect.class, comment = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(value = DB2Dialect.class, comment = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(value = AbstractTransactSQLDialect.class, comment = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(value = AbstractHANADialect.class, comment = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(value = OracleDialect.class, comment="Oracle maps tinyint to number")
@SkipForDialect(dialectClass = PostgreSQLDialect.class, reason = "Turns tinyints into shorts in result sets and advertises the type as short in the metadata")
@SkipForDialect(dialectClass = PostgresPlusDialect.class, reason = "Turns tinyints into shorts in result sets and advertises the type as short in the metadata")
@SkipForDialect(dialectClass = CockroachDialect.class, reason = "Turns tinyints into shorts in result sets and advertises the type as short in the metadata")
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(dialectClass = DB2Dialect.class, reason = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(dialectClass = AbstractTransactSQLDialect.class, matchSubTypes = true, reason = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "No support for the tinyint datatype so we use smallint")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle maps tinyint to number")
public void tinyintType() {
createEntityManagerFactory( TinyintEntity.class );
doTest( TinyintEntity.class, (byte)127 );
}
@Test
@SkipForDialect(value = H2Dialect.class, comment = "Turns floats into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(value = HSQLDialect.class, comment = "Turns floats into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(dialectClass = H2Dialect.class, reason = "Turns floats into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(dialectClass = HSQLDialect.class, reason = "Turns floats into doubles in result sets and advertises the type as double in the metadata")
public void floatType() {
createEntityManagerFactory( FloatEntity.class );
doTest( FloatEntity.class, 15516.125f );
}
@Test
@SkipForDialect(value = MySQLDialect.class, comment = "Turns reals into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(value = HSQLDialect.class, comment = "Turns reals into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(dialectClass = MySQLDialect.class, reason = "Turns reals into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(dialectClass = MariaDBDialect.class, reason = "Turns reals into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(dialectClass = TiDBDialect.class, reason = "Turns reals into doubles in result sets and advertises the type as double in the metadata")
@SkipForDialect(dialectClass = HSQLDialect.class, reason = "Turns reals into doubles in result sets and advertises the type as double in the metadata")
public void realType() {
createEntityManagerFactory( RealEntity.class );
doTest( RealEntity.class, 15516.125f );
}
@Test
@SkipForDialect(value = DerbyDialect.class, comment = "Value is too big for the maximum allowed precision of Derby")
@SkipForDialect(value = DB2Dialect.class, comment = "Value is too big for the maximum allowed precision of DB2")
@SkipForDialect(value = OracleDialect.class, comment = "Value is too big for the maximum allowed precision of Oracle")
@SkipForDialect(value = AbstractTransactSQLDialect.class, comment = "Value is too big for the maximum allowed precision of SQL Server and Sybase")
@SkipForDialect(value = AbstractHANADialect.class, comment = "Value is too big for the maximum allowed precision of HANA")
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Value is too big for the maximum allowed precision of Derby")
@SkipForDialect(dialectClass = DB2Dialect.class, reason = "Value is too big for the maximum allowed precision of DB2")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Value is too big for the maximum allowed precision of Oracle")
@SkipForDialect(dialectClass = AbstractTransactSQLDialect.class, matchSubTypes = true, reason = "Value is too big for the maximum allowed precision of SQL Server and Sybase")
@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "Value is too big for the maximum allowed precision of HANA")
public void numericType() {
createEntityManagerFactory(
NumericEntity.class
@ -179,11 +186,11 @@ public class NativeQueryResultTypeAutoDiscoveryTest {
}
@Test
@SkipForDialect(value = DerbyDialect.class, comment = "Value is too big for the maximum allowed precision of Derby")
@SkipForDialect(value = DB2Dialect.class, comment = "Value is too big for the maximum allowed precision of DB2")
@SkipForDialect(value = OracleDialect.class, comment = "Value is too big for the maximum allowed precision of Oracle")
@SkipForDialect(value = AbstractTransactSQLDialect.class, comment = "Value is too big for the maximum allowed precision of SQL Server and Sybase")
@SkipForDialect(value = AbstractHANADialect.class, comment = "Value is too big for the maximum allowed precision of HANA")
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Value is too big for the maximum allowed precision of Derby")
@SkipForDialect(dialectClass = DB2Dialect.class, reason = "Value is too big for the maximum allowed precision of DB2")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Value is too big for the maximum allowed precision of Oracle")
@SkipForDialect(dialectClass = AbstractTransactSQLDialect.class, matchSubTypes = true, reason = "Value is too big for the maximum allowed precision of SQL Server and Sybase")
@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "Value is too big for the maximum allowed precision of HANA")
public void decimalType() {
createEntityManagerFactory( DecimalEntity.class );
doTest( DecimalEntity.class, new BigDecimal( "5464384284258458485484848458.48465843584584684" ) );
@ -201,10 +208,10 @@ public class NativeQueryResultTypeAutoDiscoveryTest {
}
@Test
@SkipForDialect(value = OracleDialect.class, comment = "Oracle maps LONGVARCHAR to CLOB")
@SkipForDialect(value = DB2Dialect.class, comment = "DB2 maps LONGVARCHAR to CLOB")
@SkipForDialect(value = SybaseDialect.class, comment = "Sybase maps LONGVARCHAR to CLOB")
@SkipForDialect(value = AbstractHANADialect.class, comment = "HANA maps LONGVARCHAR to CLOB")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle maps LONGVARCHAR to CLOB")
@SkipForDialect(dialectClass = DB2Dialect.class, reason = "DB2 maps LONGVARCHAR to CLOB")
@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "Sybase maps LONGVARCHAR to CLOB")
@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "HANA maps LONGVARCHAR to CLOB")
public void longCharType() {
createEntityManagerFactory(
LongvarcharEntity.class
@ -239,10 +246,10 @@ public class NativeQueryResultTypeAutoDiscoveryTest {
}
@Test
@SkipForDialect(value = OracleDialect.class, comment = "Oracle maps LONGVARBINARY to BLOB")
@SkipForDialect(value = DB2Dialect.class, comment = "DB2 maps LONGVARBINARY to BLOB")
@SkipForDialect(value = SybaseDialect.class, comment = "Sybase maps LONGVARBINARY to BLOB")
@SkipForDialect(value = AbstractHANADialect.class, comment = "HANA maps LONGVARCHAR to BLOB")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle maps LONGVARBINARY to BLOB")
@SkipForDialect(dialectClass = DB2Dialect.class, reason = "DB2 maps LONGVARBINARY to BLOB")
@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "Sybase maps LONGVARBINARY to BLOB")
@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "HANA maps LONGVARCHAR to BLOB")
public void longBinaryType() {
createEntityManagerFactory(
LongvarbinaryEntity.class
@ -273,8 +280,8 @@ public class NativeQueryResultTypeAutoDiscoveryTest {
}
@Test
@SkipForDialect(value = OracleDialect.class, comment = "Oracle maps DATE and TIME to TIMESTAMP")
@SkipForDialect(value = PostgresPlusDialect.class, comment = "EDB maps DATE and TIME to TIMESTAMP")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle maps DATE and TIME to TIMESTAMP")
@SkipForDialect(dialectClass = PostgresPlusDialect.class, reason = "EDB maps DATE and TIME to TIMESTAMP")
public void dateTimeTypes() {
createEntityManagerFactory(
DateEntity.class,

View File

@ -5,6 +5,7 @@ import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.annotations.GeneratedColumn;
import org.hibernate.dialect.DerbyDialect;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.dialect.SybaseASEDialect;
@ -24,6 +25,7 @@ import static org.junit.Assert.assertEquals;
*/
@DomainModel(annotatedClasses = GeneratedAlwaysTest.OrderLine.class)
@SessionFactory
@SkipForDialect(dialectClass = H2Dialect.class, majorVersion = 1) // 'generated always' was added in 2.0
@SkipForDialect(dialectClass = HSQLDialect.class)
@SkipForDialect(dialectClass = DerbyDialect.class)
@SkipForDialect(dialectClass = SybaseASEDialect.class)

View File

@ -18,10 +18,10 @@ import org.hibernate.annotations.NaturalIdCache;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
@ -38,7 +38,6 @@ import static org.hamcrest.MatcherAssert.assertThat;
* @author Alex Burgel
*/
@TestForIssue( jiraKey = "HHH-11330" )
@RequiresDialectFeature( value = DialectChecks.SupportsIdentityColumns.class )
@ServiceRegistry(
settings = {
@Setting( name = AvailableSettings.GENERATE_STATISTICS, value = "true" ),
@ -47,6 +46,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
)
@DomainModel( annotatedClasses = IdentityGeneratorWithNaturalIdCacheTest.Person.class )
@SessionFactory
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsIdentityColumns.class)
public class IdentityGeneratorWithNaturalIdCacheTest {
@BeforeEach
public void prepareTestData(SessionFactoryScope scope) {

View File

@ -8,6 +8,9 @@ package org.hibernate.orm.test.pagination;
import java.util.List;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.testing.orm.domain.StandardDomainModel;
import org.hibernate.testing.orm.domain.gambit.EntityOfLists;
import org.hibernate.testing.orm.domain.gambit.EnumValue;
@ -18,6 +21,7 @@ import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@ -69,6 +73,7 @@ public class SubqueryPaginationTest {
}
@Test
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "Generates nested correlated subquery which is not supported in that version")
public void testLimitInSubquery(SessionFactoryScope scope) {
scope.inSession(
session -> {
@ -83,6 +88,8 @@ public class SubqueryPaginationTest {
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsOffsetInSubquery.class)
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "Generates nested correlated subquery which is not supported in that version")
@SkipForDialect(dialectClass = DB2Dialect.class, majorVersion = 10, reason = "Generates nested correlated subquery which is not supported in that version")
public void testLimitAndOffsetInSubquery(SessionFactoryScope scope) {
scope.inSession(
session -> {

View File

@ -7,6 +7,8 @@ import jakarta.persistence.criteria.Root;
import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.junit.Test;
@ -17,6 +19,7 @@ import static org.hibernate.testing.transaction.TransactionUtil.doInJPA;
* @author Nathan Xu
*/
@TestForIssue( jiraKey = "HHH-11877" )
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class HHH111877Test extends BaseEntityManagerFunctionalTestCase {
@Override

View File

@ -9,6 +9,8 @@ import jakarta.persistence.criteria.Subquery;
import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.junit.Test;
@ -19,6 +21,7 @@ import static org.hibernate.testing.transaction.TransactionUtil.doInJPA;
* @author Nathan Xu
*/
@TestForIssue( jiraKey = "HHH-14197" )
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class HHH14197Test extends BaseEntityManagerFunctionalTestCase {
@Override

View File

@ -1,8 +1,10 @@
package org.hibernate.orm.test.query.criteria.internal.hhh14916;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.EntityManagerFactoryScope;
import org.hibernate.testing.orm.junit.Jpa;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@ -19,6 +21,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
@Jpa(
annotatedClasses = { Author.class, Book.class, Chapter.class }
)
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsIdentityColumns.class)
public class HHH14916Test {
@BeforeEach

View File

@ -42,6 +42,7 @@ import org.hamcrest.Matchers;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isOneOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
@ -1437,7 +1438,7 @@ public class FunctionTests {
.list();
assertThat(
session.createQuery("select format(theTime as '''Hello'', hh:mm:ss a') from EntityOfBasics where id=123").getResultList().get(0),
is("Hello, 08:10:08 PM")
isOneOf( "Hello, 08:10:08 PM", "Hello, 08:10:08 pm" )
);
}
);

View File

@ -14,6 +14,7 @@ import java.time.LocalTime;
import org.hibernate.dialect.CockroachDialect;
import org.hamcrest.Matchers;
import org.hamcrest.number.IsCloseTo;
import org.hibernate.testing.orm.domain.StandardDomainModel;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
@ -30,6 +31,7 @@ import org.junit.jupiter.api.Test;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isOneOf;
/**
* @author Steve Ebersole
@ -924,7 +926,7 @@ public class StandardFunctionTests {
"select format(e.theTime as '''Hello'', hh:mm:ss a') from EntityOfBasics e" )
.getResultList()
.get( 0 ),
is( "Hello, 08:10:08 PM" )
isOneOf( "Hello, 08:10:08 PM", "Hello, 08:10:08 pm" )
);
}
);

View File

@ -11,9 +11,9 @@ import org.hibernate.tool.schema.internal.exec.GenerationTarget;
class RecordingTarget implements GenerationTarget {
public enum Category {
SCHEMA_CREATE( Pattern.compile( "create schema (.*)" ) ),
SCHEMA_DROP( Pattern.compile( "drop schema (.*)" ) ),
SCHEMA_DROP( Pattern.compile( "drop schema(?: if exists)? (.*)" ) ),
TABLE_CREATE( Pattern.compile( "create table (\\S+) .*" ) ),
TABLE_DROP( Pattern.compile( "drop table (.*)" ) ),
TABLE_DROP( Pattern.compile( "drop table(?: if exists)? (.*)" ) ),
SEQUENCE_CREATE(Pattern.compile( "create sequence (.*) start (.*)" )),
SEQUENCE_DROP(Pattern.compile( "drop sequence if exists (.*)" ));
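
A quick hypothetical check (not part of the patch) that the broadened patterns above match drop statements both with and without the optional "if exists" clause.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DropPatternCheck {
	public static void main(String[] args) {
		final Pattern tableDrop = Pattern.compile( "drop table(?: if exists)? (.*)" );
		for ( String sql : new String[] { "drop table customer", "drop table if exists customer" } ) {
			final Matcher matcher = tableDrop.matcher( sql );
			if ( matcher.matches() ) {
				System.out.println( matcher.group( 1 ) ); // "customer" in both cases
			}
		}
	}
}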

View File

@ -10,6 +10,8 @@ import org.hibernate.envers.configuration.EnversSettings;
import org.hibernate.orm.test.envers.BaseEnversFunctionalTestCase;
import org.hibernate.orm.test.envers.Priority;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.junit.Test;
import java.time.Duration;
@ -27,6 +29,7 @@ import static org.junit.Assert.assertEquals;
* @author Luke Chen
*/
@TestForIssue(jiraKey = "HHH-13945")
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class ManyToOneLazyDeleteTest extends BaseEnversFunctionalTestCase {
private Long shipmentId;
private User user;

View File

@ -15,6 +15,8 @@ import org.hibernate.orm.test.envers.BaseEnversFunctionalTestCase;
import org.hibernate.orm.test.envers.Priority;
import org.junit.Test;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate;
@ -27,6 +29,7 @@ import static org.junit.Assert.assertEquals;
* @author Chris Cranford
*/
@TestForIssue(jiraKey = "HHH-13760")
@RequiresDialectFeature(DialectChecks.SupportsIdentityColumns.class)
public class ManyToOneLazyFetchTest extends BaseEnversFunctionalTestCase {
private Long shipmentId;

View File

@ -20,7 +20,6 @@ import org.junit.Test;
/**
* @author Adam Warski (adam at warski dot org)
*/
@SuppressWarnings("unchecked")
public class OrderByLimitQuery extends BaseEnversJPAFunctionalTestCase {
private Integer id1;
private Integer id2;

View File

@ -117,6 +117,9 @@ public class HikariCPConnectionProvider implements ConnectionProvider, Configura
@Override
public void stop() {
hds.close();
HikariDataSource hds = this.hds;
if ( hds != null ) {
hds.close();
}
}
}

View File

@ -16,6 +16,4 @@ hibernate.connection.provider_class HikariCPConnectionProvider
hibernate.hikari.poolName testPool
# Purposefully low and simplistic.
hibernate.hikari.maximumPoolSize 2
hibernate.hikari.connectionTimeout 1000
hibernate.hikari.idleTimeout 3000
hibernate.hikari.maximumPoolSize 2

View File

@ -40,16 +40,16 @@ sourceSets.test.resources {
tasks.test {
enabled = ['pgsql',
'h2',
enabled = ['h2',
'pgsql',
'pgsql_ci',
'cockroachdb',
'mariadb',
'mariadb_ci',
'mysql',
'mysql_ci',
'mysql_docker',
'oracle_docker',
'oracle',
'oracle_ci',
'oracle_rds',
'mssql',
'mssql_ci'
].contains( project.db )

View File

@ -22,6 +22,7 @@ import org.hibernate.spatial.testing.dialects.oracle.OracleSTNativeSqlTemplates;
import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.testing.orm.junit.SkipForDialect;
/**
* Only for Oracle: run the tests in "OGC_STRICT" mode (i.e. using the SQL MultiMedia functions)
@ -30,6 +31,8 @@ import org.hibernate.testing.orm.junit.Setting;
@ServiceRegistry(settings = {
@Setting(name = HibernateSpatialConfigurationSettings.ORACLE_OGC_STRICT, value = "true")
})
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 21, reason = "See https://hibernate.atlassian.net/browse/HHH-15669")
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "See https://hibernate.atlassian.net/browse/HHH-15669")
public class OracleSQLMMFunctionTests extends CommonFunctionTests {
public OracleSQLMMFunctionTests() {
this.templates = new OracleSTNativeSqlTemplates();

View File

@ -20,6 +20,7 @@ import java.util.Locale;
import java.util.Objects;
import java.util.stream.Stream;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.spatial.integration.Model;
import org.hibernate.spatial.testing.IsSupportedBySpatial;
import org.hibernate.spatial.testing.SpatialTestBase;
@ -27,6 +28,7 @@ import org.hibernate.spatial.testing.datareader.TestSupport;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.TestFactory;
import org.junit.jupiter.api.function.Executable;
@ -49,6 +51,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
@SuppressWarnings("rawtypes")
@RequiresDialectFeature(feature = IsSupportedBySpatial.class)
@SessionFactory
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 21, reason = "See https://hibernate.atlassian.net/browse/HHH-15669")
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "See https://hibernate.atlassian.net/browse/HHH-15669")
public class CommonFunctionTests extends SpatialTestBase {
public final static TestSupport.TestDataPurpose PURPOSE = TestSupport.TestDataPurpose.SpatialFunctionsData;

View File

@ -12,6 +12,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.spatial.CommonSpatialFunction;
@ -22,12 +23,14 @@ import org.hibernate.spatial.testing.domain.GeomEntity;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.TestFactory;
import org.junit.jupiter.api.function.Executable;
@RequiresDialectFeature(feature = IsSupportedBySpatial.class)
@SessionFactory
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "See https://hibernate.atlassian.net/browse/HHH-15669")
public class TestGeometryConstructionWithParameter extends SpatialTestBase {
final private Map<CommonSpatialFunction, String> templates = new HashMap<>();

View File

@ -9,6 +9,7 @@ package org.hibernate.spatial.integration.predicates;
import java.util.List;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.spatial.predicate.GeolatteSpatialPredicates;
import org.hibernate.spatial.testing.IsSupportedBySpatial;
import org.hibernate.spatial.testing.SpatialSessionFactoryAware;
@ -18,6 +19,7 @@ import org.hibernate.spatial.testing.domain.SpatialDomainModel;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.Test;
import jakarta.persistence.criteria.CriteriaBuilder;
@ -34,6 +36,7 @@ import static org.geolatte.geom.crs.CoordinateReferenceSystems.WGS84;
@DomainModel(modelDescriptorClasses = SpatialDomainModel.class)
@SessionFactory
@RequiresDialectFeature(feature = IsSupportedBySpatial.class)
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "See https://hibernate.atlassian.net/browse/HHH-15669")
public class PredicateSmokeTest extends SpatialSessionFactoryAware {
Polygon<G2D> poly = polygon(

View File

@ -10,6 +10,7 @@ package org.hibernate.spatial.integration.predicates;
import java.util.stream.Stream;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.spatial.HibernateSpatialConfigurationSettings;
import org.hibernate.spatial.testing.IsSupportedBySpatial;
import org.hibernate.spatial.testing.dialects.PredicateRegexes;
@ -18,6 +19,7 @@ import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.testing.orm.junit.SkipForDialect;
@RequiresDialectFeature(feature = IsSupportedBySpatial.class)
@ServiceRegistry(settings = {
@ -25,6 +27,7 @@ import org.hibernate.testing.orm.junit.Setting;
@Setting(name = HibernateSpatialConfigurationSettings.ORACLE_OGC_STRICT, value = "true")
})
@SessionFactory
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "See https://hibernate.atlassian.net/browse/HHH-15669")
public class SpatialPredicatesTestInlineMode extends SpatialPredicatesTest{
@Override
public Stream<PredicateRegexes.PredicateRegex> getTestRegexes() {

View File

@ -26,9 +26,13 @@ import org.hibernate.testing.Skip;
import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.SkipForDialects;
import org.hibernate.testing.orm.junit.DialectContext;
import org.hibernate.testing.orm.junit.DialectFilterExtension;
import org.hibernate.testing.orm.junit.SkipForDialectGroup;
import org.hibernate.testing.orm.junit.TestingUtil;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.jupiter.api.extension.ConditionEvaluationResult;
import org.junit.runner.manipulation.NoTestsRemainException;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
@ -264,6 +268,48 @@ public class CustomRunner extends BlockJUnit4ClassRunner {
}
}
for ( org.hibernate.testing.orm.junit.SkipForDialect effectiveSkipForDialect : Helper.collectAnnotations(
org.hibernate.testing.orm.junit.SkipForDialect.class, SkipForDialectGroup.class, frameworkMethod, getTestClass()
) ) {
final boolean versionsMatch;
final int matchingMajorVersion = effectiveSkipForDialect.majorVersion();
if ( matchingMajorVersion >= 0 ) {
versionsMatch = DialectFilterExtension.versionsMatch(
matchingMajorVersion,
effectiveSkipForDialect.minorVersion(),
effectiveSkipForDialect.microVersion(),
dialect,
effectiveSkipForDialect.matchSubTypes()
);
if ( versionsMatch ) {
if ( effectiveSkipForDialect.matchSubTypes() ) {
if ( effectiveSkipForDialect.dialectClass().isInstance( dialect ) ) {
return buildIgnore( effectiveSkipForDialect );
}
}
else {
if ( effectiveSkipForDialect.dialectClass().equals( dialect.getClass() ) ) {
return buildIgnore( effectiveSkipForDialect );
}
}
}
}
else {
if ( effectiveSkipForDialect.matchSubTypes() ) {
if ( effectiveSkipForDialect.dialectClass().isInstance( dialect ) ) {
return buildIgnore( effectiveSkipForDialect );
}
}
else {
if ( effectiveSkipForDialect.dialectClass().equals( dialect.getClass() ) ) {
return buildIgnore( effectiveSkipForDialect );
}
}
}
}
// @RequiresDialects & @RequiresDialect
final List<RequiresDialect> requiresDialects = Helper.collectAnnotations(
@ -328,6 +374,10 @@ public class CustomRunner extends BlockJUnit4ClassRunner {
return buildIgnore( "@SkipForDialect match", skip.comment(), skip.jiraKey() );
}
private Ignore buildIgnore(org.hibernate.testing.orm.junit.SkipForDialect skip) {
return buildIgnore( "@SkipForDialect match", skip.reason(), null );
}
private Ignore buildIgnore(String reason, String comment, String jiraKey) {
return new IgnoreImpl( getIgnoreMessage( reason, comment, jiraKey ) );
}
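
The block above lets the legacy JUnit 4 CustomRunner honor the JUnit 5-style org.hibernate.testing.orm.junit.SkipForDialect annotation, including its major/minor/micro version attributes. As a minimal sketch of what this enables from a test author's point of view (the test class and method below are hypothetical and not part of this commit), a legacy test run through CustomRunner could now be skipped on a specific dialect version:

    import org.hibernate.dialect.OracleDialect;
    import org.hibernate.testing.junit4.CustomRunner;
    import org.junit.Test;
    import org.junit.runner.RunWith;

    @RunWith(CustomRunner.class)
    public class LegacyOracleSpatialTest {

        @org.hibernate.testing.orm.junit.SkipForDialect(
                dialectClass = OracleDialect.class,
                majorVersion = 11,
                reason = "See https://hibernate.atlassian.net/browse/HHH-15669"
        )
        @Test
        public void testSomethingThatBreaksOnOracle11() {
            // With the version check added above, CustomRunner builds an Ignore for
            // this method when the registered dialect is Oracle with major version 11;
            // on any other dialect or version the method runs normally.
        }
    }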

View File

@ -396,7 +396,7 @@ abstract public class DialectFeatureChecks {
|| dialect instanceof PostgreSQLDialect
|| dialect instanceof AbstractHANADialect
|| dialect instanceof CockroachDialect
|| dialect instanceof DB2Dialect
|| dialect instanceof DB2Dialect && ( (DB2Dialect) dialect ).getDB2Version().isSameOrAfter( 11 )
|| dialect instanceof OracleDialect
|| dialect instanceof SpannerDialect
|| dialect instanceof SQLServerDialect;
@ -409,7 +409,7 @@ abstract public class DialectFeatureChecks {
|| dialect instanceof PostgreSQLDialect
|| dialect instanceof AbstractHANADialect
|| dialect instanceof CockroachDialect
|| dialect instanceof DB2Dialect
|| dialect instanceof DB2Dialect && ( (DB2Dialect) dialect ).getDB2Version().isSameOrAfter( 11 )
|| dialect instanceof OracleDialect
|| dialect instanceof SpannerDialect
|| dialect instanceof SQLServerDialect;
@ -451,7 +451,8 @@ abstract public class DialectFeatureChecks {
public boolean apply(Dialect dialect) {
return !( dialect instanceof MySQLDialect
|| dialect instanceof SybaseDialect
|| dialect instanceof DerbyDialect )
|| dialect instanceof DerbyDialect
|| dialect instanceof DB2Dialect && ( (DB2Dialect) dialect ).getDB2Version().isBefore( 11 ) )
|| dialect instanceof MariaDBDialect;
}
}
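
These feature checks are consumed through @RequiresDialectFeature, so the DB2-version guards added above effectively exclude pre-11 DB2 from the affected tests. A hypothetical usage sketch (SomeFeatureCheck is an invented placeholder name, not a real check in DialectFeatureChecks):

    import org.hibernate.testing.orm.junit.RequiresDialectFeature;
    import org.junit.jupiter.api.Test;

    @RequiresDialectFeature(feature = DialectFeatureChecks.SomeFeatureCheck.class)
    public class SomeFeatureTest {

        @Test
        public void testFeature() {
            // Only executed when SomeFeatureCheck.apply( dialect ) returns true,
            // e.g. no longer against DB2 versions before 11 after this change.
        }
    }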

View File

@ -111,7 +111,7 @@ public class DialectFilterExtension implements ExecutionCondition {
return buffer.toString();
}
private boolean versionsMatch(
public static boolean versionsMatch(
int matchingMajorVersion,
int matchingMinorVersion,
int matchingMicroVersion,

View File

@ -652,16 +652,30 @@ public class TransactionUtil {
StandardServiceRegistry ssr = ssrb.build();
try {
try (Connection connection = ssr.getService( JdbcServices.class )
.getBootstrapJdbcConnectionAccess()
.obtainConnection();
Statement statement = connection.createStatement()) {
final JdbcConnectionAccess connectionAccess = ssr.getService( JdbcServices.class )
.getBootstrapJdbcConnectionAccess();
final Connection connection;
try {
connection = connectionAccess.obtainConnection();
}
catch (SQLException e) {
throw new RuntimeException( e );
}
try (Statement statement = connection.createStatement()) {
connection.setAutoCommit( true );
consumer.accept( statement );
}
catch (SQLException e) {
log.debug( e.getMessage() );
}
finally {
try {
connectionAccess.releaseConnection( connection );
}
catch (SQLException e) {
// ignore
}
}
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
@ -717,7 +731,13 @@ public class TransactionUtil {
public static void doWithJDBC(ServiceRegistry serviceRegistry, JDBCTransactionVoidFunction function) throws SQLException {
final JdbcConnectionAccess connectionAccess = serviceRegistry.getService( JdbcServices.class )
.getBootstrapJdbcConnectionAccess();
Connection connection = connectionAccess.obtainConnection();
final Connection connection;
try {
connection = connectionAccess.obtainConnection();
}
catch (SQLException e) {
throw new RuntimeException( e );
}
try {
function.accept( connection );
}
@ -735,7 +755,13 @@ public class TransactionUtil {
public static <T> T doWithJDBC(ServiceRegistry serviceRegistry, JDBCTransactionFunction<T> function) throws SQLException {
final JdbcConnectionAccess connectionAccess = serviceRegistry.getService( JdbcServices.class )
.getBootstrapJdbcConnectionAccess();
Connection connection = connectionAccess.obtainConnection();
final Connection connection;
try {
connection = connectionAccess.obtainConnection();
}
catch (SQLException e) {
throw new RuntimeException( e );
}
try {
return function.accept( connection );
}
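
The net effect of these TransactionUtil changes is that connections obtained from the bootstrap JdbcConnectionAccess are reliably released, and failures while obtaining a connection surface as RuntimeExceptions. A hypothetical caller sketch of the TransactionUtil helper shown above (the serviceRegistry parameter and the SQL are invented for illustration):

    import java.sql.SQLException;
    import java.sql.Statement;

    import org.hibernate.service.ServiceRegistry;

    public class CleanupExample {
        // 'serviceRegistry' would come from the test scope; it is a placeholder here.
        static void cleanUp(ServiceRegistry serviceRegistry) throws SQLException {
            TransactionUtil.doWithJDBC( serviceRegistry, connection -> {
                try ( Statement statement = connection.createStatement() ) {
                    // Plain JDBC work against the freshly obtained connection;
                    // TransactionUtil releases the connection afterwards.
                    statement.executeUpdate( "delete from GeomEntity" );
                }
                catch (SQLException e) {
                    throw new RuntimeException( e );
                }
            } );
        }
    }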

View File

@ -26,7 +26,21 @@ this.helper = new JobHelper(this)
helper.runWithNotification {
stage('Configure') {
this.environments = [
// Minimum supported versions
new BuildEnvironment( dbName: 'h2_1_4' ),
new BuildEnvironment( dbName: 'hsqldb_2_6' ),
new BuildEnvironment( dbName: 'derby_10_14' ),
new BuildEnvironment( dbName: 'mysql_5_7' ),
new BuildEnvironment( dbName: 'mariadb_10_3' ),
new BuildEnvironment( dbName: 'postgresql_10' ),
new BuildEnvironment( dbName: 'edb_10' ),
new BuildEnvironment( dbName: 'oracle_11_2' ),
new BuildEnvironment( dbName: 'db2_10_5', longRunning: true ),
new BuildEnvironment( dbName: 'mssql_2017' ), // Unfortunately there is no SQL Server 2008 image, so we have to test with 2017
// new BuildEnvironment( dbName: 'sybase_16' ), // There is only a Sybase ASE 16 image, so no point in testing that nightly
// Long running databases
new BuildEnvironment( dbName: 'cockroachdb', node: 'cockroachdb', longRunning: true ),
new BuildEnvironment( dbName: 'cockroachdb_21_2', node: 'cockroachdb', longRunning: true ),
new BuildEnvironment( dbName: 'hana_cloud', dbLockableResource: 'hana-cloud', dbLockResourceAsHost: true )
];
@ -94,55 +108,84 @@ stage('Build') {
try {
stage('Start database') {
switch (buildEnv.dbName) {
case "cockroachdb":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('cockroachdb/cockroach:v21.1.21').pull()
}
sh "./docker_db.sh cockroachdb"
state[buildEnv.tag]['containerName'] = "cockroach"
case "h2_1_4":
state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
" -Pgradle.libs.versions.h2=1.4.197 -Pgradle.libs.versions.h2gis=1.5.0"
break;
case "hsqldb_2_6":
state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
" -Pgradle.libs.versions.hsqldb=2.6.1"
break;
case "derby_10_14":
state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
" -Pgradle.libs.versions.derby=10.14.2.0"
break;
case "mysql":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mysql:5.7').pull()
docker.image('mysql:8.0.31').pull()
}
sh "./docker_db.sh mysql"
state[buildEnv.tag]['containerName'] = "mysql"
break;
case "mysql8":
case "mysql_5_7":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mysql:8.0.21').pull()
docker.image('mysql:5.7.40').pull()
}
sh "./docker_db.sh mysql_8_0"
sh "./docker_db.sh mysql_5_7"
state[buildEnv.tag]['containerName'] = "mysql"
break;
case "mariadb":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mariadb:10.7.5').pull()
docker.image('mariadb:10.9.3').pull()
}
sh "./docker_db.sh mariadb"
state[buildEnv.tag]['containerName'] = "mariadb"
break;
case "mariadb_10_3":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('mariadb:10.3.36').pull()
}
sh "./docker_db.sh mariadb_10_3"
state[buildEnv.tag]['containerName'] = "mariadb"
break;
case "postgresql":
// use the postgis image to enable the PGSQL GIS (spatial) extension
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('postgis/postgis:9.5-2.5').pull()
docker.image('postgis/postgis:14-3.3').pull()
}
sh "./docker_db.sh postgresql"
state[buildEnv.tag]['containerName'] = "postgres"
break;
case "postgresql_14":
case "postgresql_10":
// use the postgis image to enable the PGSQL GIS (spatial) extension
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('postgis/postgis:14-3.3').pull()
docker.image('postgis/postgis:10-2.5').pull()
}
sh "./docker_db.sh postgresql_14"
sh "./docker_db.sh postgresql_10"
state[buildEnv.tag]['containerName'] = "postgres"
break;
case "edb":
docker.image('quay.io/enterprisedb/edb-postgres-advanced:14.5-3.2-postgis').pull()
sh "./docker_db.sh edb"
state[buildEnv.tag]['containerName'] = "edb"
break;
case "edb_10":
docker.image('quay.io/enterprisedb/edb-postgres-advanced:10.22').pull()
sh "./docker_db.sh edb_10"
state[buildEnv.tag]['containerName'] = "edb"
break;
case "oracle":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('gvenzl/oracle-xe:18.4.0-full').pull()
docker.image('gvenzl/oracle-xe:21.3.0-full').pull()
}
sh "./docker_db.sh oracle_18"
sh "./docker_db.sh oracle"
state[buildEnv.tag]['containerName'] = "oracle"
break;
case "oracle_11_2":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('gvenzl/oracle-xe:11.2.0.2-full').pull()
}
sh "./docker_db.sh oracle_11"
state[buildEnv.tag]['containerName'] = "oracle"
break;
case "db2":
@ -152,11 +195,23 @@ stage('Build') {
sh "./docker_db.sh db2"
state[buildEnv.tag]['containerName'] = "db2"
break;
case "db2_10_5":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('ibmoms/db2express-c@sha256:a499afd9709a1f69fb41703e88def9869955234c3525547e2efc3418d1f4ca2b').pull()
}
sh "./docker_db.sh db2_10_5"
state[buildEnv.tag]['containerName'] = "db2"
break;
case "mssql":
docker.image('mcr.microsoft.com/mssql/server@sha256:f54a84b8a802afdfa91a954e8ddfcec9973447ce8efec519adf593b54d49bedf').pull()
sh "./docker_db.sh mssql"
state[buildEnv.tag]['containerName'] = "mssql"
break;
case "mssql_2017":
docker.image('mcr.microsoft.com/mssql/server@sha256:7d194c54e34cb63bca083542369485c8f4141596805611e84d8c8bab2339eede').pull()
sh "./docker_db.sh mssql_2017"
state[buildEnv.tag]['containerName'] = "mssql"
break;
case "sybase":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('nguoianphu/docker-sybase').pull()
@ -164,10 +219,19 @@ stage('Build') {
sh "./docker_db.sh sybase"
state[buildEnv.tag]['containerName'] = "sybase"
break;
case "edb":
docker.image('quay.io/enterprisedb/edb-postgres-advanced:10.22').pull()
sh "./docker_db.sh edb"
state[buildEnv.tag]['containerName'] = "edb"
case "cockroachdb":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('cockroachdb/cockroach:v22.1.10').pull()
}
sh "./docker_db.sh cockroachdb"
state[buildEnv.tag]['containerName'] = "cockroach"
break;
case "cockroachdb_21_2":
docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
docker.image('cockroachdb/cockroach:v21.2.16').pull()
}
sh "./docker_db.sh cockroachdb_21_2"
state[buildEnv.tag]['containerName'] = "cockroach"
break;
}
}
@ -176,7 +240,7 @@ stage('Build') {
withEnv(["RDBMS=${buildEnv.dbName}"]) {
try {
if (buildEnv.dbLockableResource == null) {
timeout( [time: buildEnv.longRunning ? 240 : 120, unit: 'MINUTES'] ) {
timeout( [time: buildEnv.longRunning ? 480 : 120, unit: 'MINUTES'] ) {
sh cmd
}
}
@ -185,7 +249,7 @@ stage('Build') {
if ( buildEnv.dbLockResourceAsHost ) {
cmd += " -DdbHost=${LOCKED_RESOURCE}"
}
timeout( [time: buildEnv.longRunning ? 240 : 120, unit: 'MINUTES'] ) {
timeout( [time: buildEnv.longRunning ? 480 : 120, unit: 'MINUTES'] ) {
sh cmd
}
}

View File

@ -160,15 +160,35 @@ dependencyResolutionManagement {
alias( "weld" ).to( "org.jboss.weld.se", "weld-se-shaded" ).version( "4.0.1.SP1" )
}
dbLibs {
version( "h2", "2.1.210" )
String h2Version = settings.ext.find( "gradle.libs.versions.h2" )
if ( h2Version == null ) {
h2Version = "2.1.214"
}
String h2gisVersion = settings.ext.find( "gradle.libs.versions.h2gis" )
if ( h2gisVersion == null ) {
h2gisVersion = "2.1.0"
}
String hsqldbVersion = settings.ext.find( "gradle.libs.versions.hsqldb" )
if ( hsqldbVersion == null ) {
hsqldbVersion = "2.7.1"
}
String derbyVersion = settings.ext.find( "gradle.libs.versions.derby" )
if ( derbyVersion == null ) {
// The latest Derby version, 10.16.1.1, only supports JDK 17+, but 10.15.2 should be compatible
derbyVersion = "10.15.2.0"
}
version( "h2", h2Version )
version( "h2gis", h2gisVersion )
version( "hsqldb", hsqldbVersion )
version( "derby", derbyVersion )
version( "pgsql", "42.5.0" )
version( "mysql", "8.0.27" )
version( "oracle", "21.3.0.0" )
alias( "h2" ).to( "com.h2database", "h2" ).versionRef( "h2" )
alias( "h2gis" ).to( "org.orbisgis", "h2gis" ).version( "2.0.0" )
alias( "hsqldb" ).to( "org.hsqldb", "hsqldb" ).version( "2.6.1" )
alias( "derby" ).to( "org.apache.derby", "derby" ).version( "10.14.2.0" )
alias( "h2gis" ).to( "org.orbisgis", "h2gis" ).versionRef( "h2gis" )
alias( "hsqldb" ).to( "org.hsqldb", "hsqldb" ).versionRef( "hsqldb" )
alias( "derby" ).to( "org.apache.derby", "derby" ).versionRef( "derby" )
alias( "postgresql" ).to( "org.postgresql", "postgresql" ).versionRef( "pgsql" )
alias( "cockroachdb" ).to( "org.postgresql", "postgresql" ).version( "42.2.8" )
alias( "mysql" ).to( "mysql", "mysql-connector-java" ).versionRef( "mysql" )