Merge remote-tracking branch 'upstream/master' into wip/6.0
This commit is contained in:
commit 4931c7e69f
@@ -0,0 +1,7 @@
+# Reclaim disk space, otherwise we only have 13 GB free at the start of a job
+
+docker rmi node:10 node:12 mcr.microsoft.com/azure-pipelines/node8-typescript:latest
+# That is 18 GB
+sudo rm -rf /usr/share/dotnet
+# That is 1.2 GB
+sudo rm -rf /usr/share/swift
@@ -43,6 +43,8 @@ jobs:
       experimental: true
     steps:
       - uses: actions/checkout@v2
+      - name: Reclaim Disk Space
+        run: .github/ci-prerequisites.sh
       - name: Set up Java 8
         uses: actions/setup-java@v1
         with:
@@ -7,7 +7,7 @@

 hibernate.dialect org.hibernate.dialect.PostgreSQL94Dialect
 hibernate.connection.driver_class org.postgresql.Driver
-hibernate.connection.url jdbc:postgresql:hibernate_orm_test
+hibernate.connection.url jdbc:postgresql://localhost:5432/hibernate_orm_test?preparedStatementCacheQueries=0
 hibernate.connection.username hibernate_orm_test
 hibernate.connection.password hibernate_orm_test

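For reference, a minimal JDBC sketch showing how the URL above (with preparedStatementCacheQueries=0 to disable the driver's prepared-statement cache) would be consumed. The class is purely illustrative and not part of this commit; it assumes the PostgreSQL JDBC driver is on the classpath and reuses the host, database and credentials from this properties file.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    // Illustrative only: connects with the same URL used above, so the driver
    // keeps no cached server-side prepared statements.
    public class PostgresUrlCheck {
        public static void main(String[] args) throws Exception {
            String url = "jdbc:postgresql://localhost:5432/hibernate_orm_test?preparedStatementCacheQueries=0";
            try ( Connection connection = DriverManager.getConnection( url, "hibernate_orm_test", "hibernate_orm_test" );
                  Statement statement = connection.createStatement();
                  ResultSet rs = statement.executeQuery( "select version()" ) ) {
                while ( rs.next() ) {
                    System.out.println( rs.getString( 1 ) );
                }
            }
        }
    }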
docker_db.sh
@@ -92,6 +92,70 @@ alter database drop logfile group 3;
 EOF\""
 }

+oracle_ee() {
+    docker rm -f oracle || true
+    # We need to use the defaults
+    # sys as sysdba/Oradoc_db1
+    docker run --name oracle -d -p 1521:1521 store/oracle/database-enterprise:12.2.0.1-slim
+    # Give the container some time to start
+    OUTPUT=
+    while [[ $OUTPUT != *"NLS_CALENDAR"* ]]; do
+        echo "Waiting for Oracle to start..."
+        sleep 10
+        OUTPUT=$(docker logs oracle)
+    done
+    echo "Oracle successfully started"
+    # We increase file sizes to avoid online resizes as that requires lots of CPU which is restricted in XE
+    docker exec oracle bash -c "source /home/oracle/.bashrc; \$ORACLE_HOME/bin/sqlplus sys/Oradoc_db1@ORCLCDB as sysdba <<EOF
+create user c##hibernate_orm_test identified by hibernate_orm_test container=all;
+grant connect, resource, dba to c##hibernate_orm_test container=all;
+alter database tempfile '/u02/app/oracle/oradata/ORCL/temp01.dbf' resize 400M;
+alter database datafile '/u02/app/oracle/oradata/ORCL/system01.dbf' resize 1000M;
+alter database datafile '/u02/app/oracle/oradata/ORCL/sysaux01.dbf' resize 900M;
+alter database datafile '/u02/app/oracle/oradata/ORCL/undotbs01.dbf' resize 300M;
+alter database add logfile group 4 '/u02/app/oracle/oradata/ORCL/redo04.log' size 500M reuse;
+alter database add logfile group 5 '/u02/app/oracle/oradata/ORCL/redo05.log' size 500M reuse;
+alter database add logfile group 6 '/u02/app/oracle/oradata/ORCL/redo06.log' size 500M reuse;
+
+alter system switch logfile;
+alter system switch logfile;
+alter system switch logfile;
+alter system checkpoint;
+
+alter database drop logfile group 1;
+alter database drop logfile group 2;
+alter database drop logfile group 3;
+alter session set container=ORCLPDB1;
+alter database datafile '/u02/app/oracle/oradata/ORCLCDB/orclpdb1/system01.dbf' resize 500M;
+alter database datafile '/u02/app/oracle/oradata/ORCLCDB/orclpdb1/sysaux01.dbf' resize 500M;
+EOF"
+}
+
+hana() {
+    temp_dir=$(mktemp -d)
+    echo '{"master_password" : "H1bernate_test"}' >$temp_dir/password.json
+    chmod 777 -R $temp_dir
+    docker rm -f hana || true
+    docker run -d --name hana -p 39013:39013 -p 39017:39017 -p 39041-39045:39041-39045 -p 1128-1129:1128-1129 -p 59013-59014:59013-59014 \
+        --ulimit nofile=1048576:1048576 \
+        --sysctl kernel.shmmax=1073741824 \
+        --sysctl net.ipv4.ip_local_port_range='40000 60999' \
+        --sysctl kernel.shmmni=524288 \
+        --sysctl kernel.shmall=8388608 \
+        -v $temp_dir:/config \
+        store/saplabs/hanaexpress:2.00.045.00.20200121.1 \
+        --passwords-url file:///config/password.json \
+        --agree-to-sap-license
+    # Give the container some time to start
+    OUTPUT=
+    while [[ $OUTPUT != *"Startup finished"* ]]; do
+        echo "Waiting for HANA to start..."
+        sleep 10
+        OUTPUT=$(docker logs hana)
+    done
+    echo "HANA successfully started"
+}
+
 if [ -z ${1} ]; then
     echo "No db name provided"
     echo "Provide one of:"
@@ -38,7 +38,8 @@ ext {
        'jdbc.driver': 'org.postgresql.Driver',
        'jdbc.user' : 'hibernate_orm_test',
        'jdbc.pass' : 'hibernate_orm_test',
-       'jdbc.url' : 'jdbc:postgresql:hibernate_orm_test'
+       // Disable prepared statement caching due to https://www.postgresql.org/message-id/CAEcMXhmmRd4-%2BNQbnjDT26XNdUoXdmntV9zdr8%3DTu8PL9aVCYg%40mail.gmail.com
+       'jdbc.url' : 'jdbc:postgresql://' + dbHost + '/hibernate_orm_test?preparedStatementCacheQueries=0'
    ],
    pgsql_docker : [
        'db.dialect' : 'org.hibernate.dialect.PostgreSQLDialect',
@@ -96,7 +97,8 @@ ext {
        'jdbc.driver': 'org.postgresql.Driver',
        'jdbc.user' : 'hibernate_orm_test',
        'jdbc.pass' : 'hibernate_orm_test',
-       'jdbc.url' : 'jdbc:postgresql:hibernate_orm_test'
+       // Disable prepared statement caching due to https://www.postgresql.org/message-id/CAEcMXhmmRd4-%2BNQbnjDT26XNdUoXdmntV9zdr8%3DTu8PL9aVCYg%40mail.gmail.com
+       'jdbc.url' : 'jdbc:postgresql://' + dbHost + '/hibernate_orm_test?preparedStatementCacheQueries=0'
    ],
    oracle : [
        'db.dialect' : 'org.hibernate.dialect.OracleDialect',
@@ -105,15 +107,13 @@ ext {
        'jdbc.pass' : 'hibernate_orm_test',
        'jdbc.url' : 'jdbc:oracle:thin:@localhost:1521/xe'
    ],
-   // Uses the default settings for using https://hub.docker.com/_/oracle-database-enterprise-edition
-   // After registering to get access (see instructions at above link), start it for testing with:
-   // docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name ORCLCDB -p 1521:1521 store/oracle/database-enterprise:12.2.0.1-slim
+   // Use ./docker_db.sh oracle_ee to start the database
    oracle_docker : [
        'db.dialect' : 'org.hibernate.dialect.OracleDialect',
        'jdbc.driver': 'oracle.jdbc.OracleDriver',
-       'jdbc.user' : 'sys as sysdba',
-       'jdbc.pass' : 'Oradoc_db1',
-       'jdbc.url' : 'jdbc:oracle:thin:@localhost:1521:ORCLCDB'
+       'jdbc.user' : 'c##hibernate_orm_test',
+       'jdbc.pass' : 'hibernate_orm_test',
+       'jdbc.url' : 'jdbc:oracle:thin:@' + dbHost + ':1521/ORCLPDB1.localdomain'
    ],
    oracle_ci : [
        'db.dialect' : 'org.hibernate.dialect.OracleDialect',
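As a usage sketch only: once ./docker_db.sh oracle_ee has created the c##hibernate_orm_test account, a plain JDBC connection using the exact URL, user and password configured in the oracle_docker entry above would look roughly like this. Hypothetical helper class, not part of the build; dbHost is assumed to be localhost.

    import java.sql.Connection;
    import java.sql.DriverManager;

    // Illustrative only: the URL, user and password mirror the oracle_docker entry above.
    public class OracleEeSmokeTest {
        public static void main(String[] args) throws Exception {
            String url = "jdbc:oracle:thin:@localhost:1521/ORCLPDB1.localdomain";
            try ( Connection connection = DriverManager.getConnection( url, "c##hibernate_orm_test", "hibernate_orm_test" ) ) {
                System.out.println( "Connected: " + connection.getMetaData().getDatabaseProductVersion() );
            }
        }
    }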
@@ -162,21 +162,32 @@ ext {
        'jdbc.driver': 'com.sap.db.jdbc.Driver',
        'jdbc.user' : 'HIBERNATE_TEST',
        'jdbc.pass' : 'H1bernate_test',
-       'jdbc.url' : 'jdbc:sap://localhost:30015/'
+       // Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
+       'jdbc.url' : 'jdbc:sap://localhost:30015/?statementCacheSize=0'
    ],
    hana_cloud : [
        'db.dialect' : 'org.hibernate.dialect.HANACloudColumnStoreDialect',
        'jdbc.driver': 'com.sap.db.jdbc.Driver',
        'jdbc.user' : 'HIBERNATE_TEST',
        'jdbc.pass' : 'H1bernate_test',
-       'jdbc.url' : 'jdbc:sap://localhost:443/?encrypt=true&validateCertificate=false'
+       // Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
+       'jdbc.url' : 'jdbc:sap://localhost:443/?encrypt=true&validateCertificate=false&statementCacheSize=0'
    ],
    hana_vlad : [
        'db.dialect' : 'org.hibernate.dialect.HANAColumnStoreDialect',
        'jdbc.driver': 'com.sap.db.jdbc.Driver',
        'jdbc.user' : 'VLAD',
        'jdbc.pass' : 'V1ad_test',
-       'jdbc.url' : 'jdbc:sap://localhost:39015/'
+       // Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
+       'jdbc.url' : 'jdbc:sap://localhost:39015/?statementCacheSize=0'
+   ],
+   hana_docker : [
+       'db.dialect' : 'org.hibernate.dialect.HANAColumnStoreDialect',
+       'jdbc.driver': 'com.sap.db.jdbc.Driver',
+       'jdbc.user' : 'SYSTEM',
+       'jdbc.pass' : 'H1bernate_test',
+       // Disable prepared statement caching due to https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.04/en-US/78f2163887814223858e4369d18e2847.html
+       'jdbc.url' : 'jdbc:sap://' + dbHost + ':39017/?statementCacheSize=0'
    ],
    cockroachdb : [
        'db.dialect' : 'org.hibernate.dialect.CockroachDialect',
@@ -325,6 +325,8 @@ test {
    systemProperty 'user.country', 'US'
    systemProperty 'user.timezone', 'UTC'
    systemProperty 'file.encoding', 'UTF-8'
+   // Needed for AdoptOpenJDK on alpine? The problem is similar to this: https://github.com/mockito/mockito/issues/978
+   jvmArgs '-XX:+StartAttachListener'
 }

 // Enable the experimental features of ByteBuddy with JDK 15+
@@ -32,7 +32,7 @@ ext {

    assertjVersion = '3.14.0'

-   geolatteVersion = '1.4.0'
+   geolatteVersion = '1.6.1'

    shrinkwrapVersion = '1.2.6'
    shrinkwrapDescriptorsVersion = '2.0.0'
@@ -20,6 +20,7 @@ import org.hibernate.event.service.spi.EventListenerGroup;
 import org.hibernate.event.service.spi.EventListenerRegistry;
 import org.hibernate.event.spi.EventSource;
 import org.hibernate.event.spi.EventType;
+import org.hibernate.internal.FastSessionServices;
 import org.hibernate.internal.util.StringHelper;
 import org.hibernate.persister.collection.CollectionPersister;
 import org.hibernate.pretty.MessageHelper;
@@ -184,6 +185,11 @@ public abstract class CollectionAction implements Executable, Serializable, Comp
        }
    }

+   /**
+    * @deprecated This will be removed as it's not very efficient. If you need access to EventListenerGroup(s),
+    * use the direct references from {@link #getFastSessionServices()}.
+    */
+   @Deprecated
    protected <T> EventListenerGroup<T> listenerGroup(EventType<T> eventType) {
        return getSession()
                .getFactory()
@@ -195,4 +201,13 @@ public abstract class CollectionAction implements Executable, Serializable, Comp
    protected EventSource eventSource() {
        return (EventSource) getSession();
    }
+
+   /**
+    * Convenience method for all subclasses.
+    * @return the {@link FastSessionServices} instance from the SessionFactory.
+    */
+   protected FastSessionServices getFastSessionServices() {
+       return session.getFactory().getFastSessionServices();
+   }
+
 }
@@ -9,8 +9,6 @@ package org.hibernate.action.internal;
 import org.hibernate.HibernateException;
 import org.hibernate.collection.spi.PersistentCollection;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
-import org.hibernate.event.service.spi.EventListenerGroup;
-import org.hibernate.event.spi.EventType;
 import org.hibernate.event.spi.PostCollectionRecreateEvent;
 import org.hibernate.event.spi.PostCollectionRecreateEventListener;
 import org.hibernate.event.spi.PreCollectionRecreateEvent;
@@ -58,24 +56,22 @@ public final class CollectionRecreateAction extends CollectionAction {
    }

    private void preRecreate() {
-       final EventListenerGroup<PreCollectionRecreateEventListener> listenerGroup = listenerGroup( EventType.PRE_COLLECTION_RECREATE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PreCollectionRecreateEvent event = new PreCollectionRecreateEvent( getPersister(), getCollection(), eventSource() );
-       for ( PreCollectionRecreateEventListener listener : listenerGroup.listeners() ) {
-           listener.onPreRecreateCollection( event );
-       }
+       getFastSessionServices()
+               .eventListenerGroup_PRE_COLLECTION_RECREATE
+               .fireLazyEventOnEachListener( this::newPreCollectionRecreateEvent, PreCollectionRecreateEventListener::onPreRecreateCollection );
    }

+   private PreCollectionRecreateEvent newPreCollectionRecreateEvent() {
+       return new PreCollectionRecreateEvent( getPersister(), getCollection(), eventSource() );
+   }
+
    private void postRecreate() {
-       final EventListenerGroup<PostCollectionRecreateEventListener> listenerGroup = listenerGroup( EventType.POST_COLLECTION_RECREATE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostCollectionRecreateEvent event = new PostCollectionRecreateEvent( getPersister(), getCollection(), eventSource() );
-       for ( PostCollectionRecreateEventListener listener : listenerGroup.listeners() ) {
-           listener.onPostRecreateCollection( event );
-       }
+       getFastSessionServices()
+               .eventListenerGroup_POST_COLLECTION_RECREATE
+               .fireLazyEventOnEachListener( this::newPostCollectionRecreateEvent, PostCollectionRecreateEventListener::onPostRecreateCollection );
    }
+
+   private PostCollectionRecreateEvent newPostCollectionRecreateEvent() {
+       return new PostCollectionRecreateEvent( getPersister(), getCollection(), eventSource() );
+   }
 }
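The rewrite above replaces eager event construction with the supplier-based fireLazyEventOnEachListener call, so the event object is only created when at least one listener is registered. A self-contained sketch of that idea, using hypothetical Listener/Event types rather than Hibernate's real SPI:

    import java.util.List;
    import java.util.function.BiConsumer;
    import java.util.function.Supplier;

    // Minimal illustration of the "lazy event" pattern: the Supplier is only
    // invoked when the listener list is non-empty, avoiding a useless allocation.
    final class LazyEventGroup<L> {
        private final List<L> listeners;

        LazyEventGroup(List<L> listeners) {
            this.listeners = listeners;
        }

        <E> void fireLazyEventOnEachListener(Supplier<E> eventSupplier, BiConsumer<L, E> action) {
            if ( listeners.isEmpty() ) {
                return; // no listeners: the event is never created
            }
            final E event = eventSupplier.get();
            for ( L listener : listeners ) {
                action.accept( listener, event );
            }
        }
    }

In the refactored actions, the event factory is a private method reference (for example this::newPreCollectionRecreateEvent), so nothing is allocated on the common path where no listener is registered.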
@@ -112,34 +112,33 @@ public final class CollectionRemoveAction extends CollectionAction {
    }

    private void preRemove() {
-       final EventListenerGroup<PreCollectionRemoveEventListener> listenerGroup = listenerGroup( EventType.PRE_COLLECTION_REMOVE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PreCollectionRemoveEvent event = new PreCollectionRemoveEvent(
+       getFastSessionServices()
+               .eventListenerGroup_PRE_COLLECTION_REMOVE
+               .fireLazyEventOnEachListener( this::newPreCollectionRemoveEvent, PreCollectionRemoveEventListener::onPreRemoveCollection );
+   }
+
+   private PreCollectionRemoveEvent newPreCollectionRemoveEvent() {
+       return new PreCollectionRemoveEvent(
                getPersister(),
                getCollection(),
                eventSource(),
                affectedOwner
        );
-       for ( PreCollectionRemoveEventListener listener : listenerGroup.listeners() ) {
-           listener.onPreRemoveCollection( event );
-       }
    }

    private void postRemove() {
-       final EventListenerGroup<PostCollectionRemoveEventListener> listenerGroup = listenerGroup( EventType.POST_COLLECTION_REMOVE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostCollectionRemoveEvent event = new PostCollectionRemoveEvent(
+       getFastSessionServices()
+               .eventListenerGroup_POST_COLLECTION_REMOVE
+               .fireLazyEventOnEachListener( this::newPostCollectionRemoveEvent, PostCollectionRemoveEventListener::onPostRemoveCollection );
+   }
+
+   private PostCollectionRemoveEvent newPostCollectionRemoveEvent() {
+       return new PostCollectionRemoveEvent(
                getPersister(),
                getCollection(),
                eventSource(),
                affectedOwner
        );
-       for ( PostCollectionRemoveEventListener listener : listenerGroup.listeners() ) {
-           listener.onPostRemoveCollection( event );
-       }
    }

 }
@@ -97,32 +97,31 @@ public final class CollectionUpdateAction extends CollectionAction {
    }

    private void preUpdate() {
-       final EventListenerGroup<PreCollectionUpdateEventListener> listenerGroup = listenerGroup( EventType.PRE_COLLECTION_UPDATE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PreCollectionUpdateEvent event = new PreCollectionUpdateEvent(
+       getFastSessionServices()
+               .eventListenerGroup_PRE_COLLECTION_UPDATE
+               .fireLazyEventOnEachListener( this::newPreCollectionUpdateEvent, PreCollectionUpdateEventListener::onPreUpdateCollection );
+   }
+
+   private PreCollectionUpdateEvent newPreCollectionUpdateEvent() {
+       return new PreCollectionUpdateEvent(
                getPersister(),
                getCollection(),
                eventSource()
        );
-       for ( PreCollectionUpdateEventListener listener : listenerGroup.listeners() ) {
-           listener.onPreUpdateCollection( event );
-       }
    }

    private void postUpdate() {
-       final EventListenerGroup<PostCollectionUpdateEventListener> listenerGroup = listenerGroup( EventType.POST_COLLECTION_UPDATE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostCollectionUpdateEvent event = new PostCollectionUpdateEvent(
+       getFastSessionServices()
+               .eventListenerGroup_POST_COLLECTION_UPDATE
+               .fireLazyEventOnEachListener( this::newPostCollectionUpdateEvent, PostCollectionUpdateEventListener::onPostUpdateCollection );
+   }
+
+   private PostCollectionUpdateEvent newPostCollectionUpdateEvent() {
+       return new PostCollectionUpdateEvent(
                getPersister(),
                getCollection(),
                eventSource()
        );
-       for ( PostCollectionUpdateEventListener listener : listenerGroup.listeners() ) {
-           listener.onPostUpdateCollection( event );
-       }
    }

 }
@@ -18,12 +18,11 @@ import org.hibernate.event.service.spi.EventListenerGroup;
 import org.hibernate.event.service.spi.EventListenerRegistry;
 import org.hibernate.event.spi.EventSource;
 import org.hibernate.event.spi.EventType;
+import org.hibernate.internal.FastSessionServices;
 import org.hibernate.internal.util.StringHelper;
 import org.hibernate.persister.entity.EntityPersister;
 import org.hibernate.pretty.MessageHelper;

-import org.jboss.logging.Logger;
-
 /**
 * Base class for actions relating to insert/update/delete of an entity
 * instance.
@@ -32,7 +31,6 @@ import org.jboss.logging.Logger;
 */
 public abstract class EntityAction
        implements Executable, Serializable, Comparable, AfterTransactionCompletionProcess {
-   private static final Logger LOG = Logger.getLogger(EntityAction.class);

    private final String entityName;
    private final Object id;
@@ -189,6 +187,11 @@ public abstract class EntityAction
        }
    }

+   /**
+    * @deprecated This will be removed as it's not very efficient. If you need access to EventListenerGroup(s),
+    * use the direct references from {@link #getFastSessionServices()}.
+    */
+   @Deprecated
    protected <T> EventListenerGroup<T> listenerGroup(EventType<T> eventType) {
        return getSession()
                .getFactory()
@@ -200,4 +203,13 @@ public abstract class EntityAction
    protected EventSource eventSource() {
        return (EventSource) getSession();
    }
+
+   /**
+    * Convenience method for all subclasses.
+    * @return the {@link FastSessionServices} instance from the SessionFactory.
+    */
+   protected FastSessionServices getFastSessionServices() {
+       return session.getFactory().getFastSessionServices();
+   }
+
 }
@@ -15,7 +15,6 @@ import org.hibernate.engine.spi.PersistenceContext;
 import org.hibernate.engine.spi.SessionImplementor;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
 import org.hibernate.event.service.spi.EventListenerGroup;
-import org.hibernate.event.spi.EventType;
 import org.hibernate.event.spi.PostCommitDeleteEventListener;
 import org.hibernate.event.spi.PostDeleteEvent;
 import org.hibernate.event.spi.PostDeleteEventListener;
@@ -157,7 +156,7 @@ public class EntityDeleteAction extends EntityAction {

    protected boolean preDelete() {
        boolean veto = false;
-       final EventListenerGroup<PreDeleteEventListener> listenerGroup = listenerGroup( EventType.PRE_DELETE );
+       final EventListenerGroup<PreDeleteEventListener> listenerGroup = getFastSessionServices().eventListenerGroup_PRE_DELETE;
        if ( listenerGroup.isEmpty() ) {
            return veto;
        }
@@ -169,49 +168,41 @@ public class EntityDeleteAction extends EntityAction {
    }

    protected void postDelete() {
-       final EventListenerGroup<PostDeleteEventListener> listenerGroup = listenerGroup( EventType.POST_DELETE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostDeleteEvent event = new PostDeleteEvent(
+       getFastSessionServices()
+               .eventListenerGroup_POST_DELETE
+               .fireLazyEventOnEachListener( this::newPostDeleteEvent, PostDeleteEventListener::onPostDelete );
+   }
+
+   PostDeleteEvent newPostDeleteEvent() {
+       return new PostDeleteEvent(
                getInstance(),
                getId(),
                state,
                getPersister(),
                eventSource()
        );
-       for ( PostDeleteEventListener listener : listenerGroup.listeners() ) {
-           listener.onPostDelete( event );
-       }
    }

    protected void postCommitDelete(boolean success) {
-       final EventListenerGroup<PostDeleteEventListener> listenerGroup = listenerGroup( EventType.POST_COMMIT_DELETE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostDeleteEvent event = new PostDeleteEvent(
-               getInstance(),
-               getId(),
-               state,
-               getPersister(),
-               eventSource()
-       );
-       for ( PostDeleteEventListener listener : listenerGroup.listeners() ) {
-           if ( PostCommitDeleteEventListener.class.isInstance( listener ) ) {
-               if ( success ) {
-                   listener.onPostDelete( event );
-               }
-               else {
-                   ((PostCommitDeleteEventListener) listener).onPostDeleteCommitFailed( event );
-               }
-           }
-           else {
-               //default to the legacy implementation that always fires the event
-               listener.onPostDelete( event );
-           }
-       }
-   }
+       final EventListenerGroup<PostDeleteEventListener> eventListeners = getFastSessionServices()
+               .eventListenerGroup_POST_COMMIT_DELETE;
+       if (success) {
+           eventListeners.fireLazyEventOnEachListener( this::newPostDeleteEvent, PostDeleteEventListener::onPostDelete );
+       }
+       else {
+           eventListeners.fireLazyEventOnEachListener( this::newPostDeleteEvent, EntityDeleteAction::postCommitDeleteOnUnsuccessful );
+       }
+   }
+
+   private static void postCommitDeleteOnUnsuccessful(PostDeleteEventListener listener, PostDeleteEvent event) {
+       if ( listener instanceof PostCommitDeleteEventListener ) {
+           ( (PostCommitDeleteEventListener) listener ).onPostDeleteCommitFailed( event );
+       }
+       else {
+           //default to the legacy implementation that always fires the event
+           listener.onPostDelete( event );
+       }
+   }

    @Override
    public void doAfterTransactionCompletion(boolean success, SharedSessionContractImplementor session) throws HibernateException {
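One detail worth noting in the new postCommitDelete above is the conditional argument success ? PostDeleteEventListener::onPostDelete : EntityDeleteAction::postCommitDeleteOnUnsuccessful: both branches are method references that fit the same functional interface, so the success/failure dispatch is decided once per call rather than once per listener. A generic sketch of that idiom, with made-up types that are not part of Hibernate:

    import java.util.function.BiConsumer;

    // Both branches of the ternary target BiConsumer<Listener, String>, so the
    // dispatch decision happens once per call instead of inside the listener loop.
    public class CallbackSelection {
        interface Listener {
            void onSuccess(String event);
            void onFailure(String event);
        }

        static void fire(boolean success, Listener listener, String event) {
            BiConsumer<Listener, String> action =
                    success ? Listener::onSuccess : Listener::onFailure;
            action.accept( listener, event );
        }

        public static void main(String[] args) {
            fire( true, new Listener() {
                public void onSuccess(String event) { System.out.println( "ok: " + event ); }
                public void onFailure(String event) { System.out.println( "failed: " + event ); }
            }, "delete" );
        }
    }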
@@ -231,7 +222,7 @@ public class EntityDeleteAction extends EntityAction {

    @Override
    protected boolean hasPostCommitEventListeners() {
-       final EventListenerGroup<PostDeleteEventListener> group = listenerGroup( EventType.POST_COMMIT_DELETE );
+       final EventListenerGroup<PostDeleteEventListener> group = getFastSessionServices().eventListenerGroup_POST_COMMIT_DELETE;
        for ( PostDeleteEventListener listener : group.listeners() ) {
            if ( listener.requiresPostCommitHandling( getPersister() ) ) {
                return true;
@@ -119,7 +119,7 @@ public class EntityIdentityInsertAction extends AbstractEntityInsertAction {

    @Override
    protected boolean hasPostCommitEventListeners() {
-       final EventListenerGroup<PostInsertEventListener> group = listenerGroup( EventType.POST_COMMIT_INSERT );
+       final EventListenerGroup<PostInsertEventListener> group = getFastSessionServices().eventListenerGroup_POST_COMMIT_INSERT;
        for ( PostInsertEventListener listener : group.listeners() ) {
            if ( listener.requiresPostCommitHandling( getPersister() ) ) {
                return true;
@@ -140,57 +140,42 @@ public class EntityIdentityInsertAction extends AbstractEntityInsertAction {
    }

    protected void postInsert() {
-       final EventSource eventSource = eventSource();
-       if ( isDelayed ) {
-           eventSource.getPersistenceContextInternal().replaceDelayedEntityIdentityInsertKeys( delayedEntityKey, generatedId );
-       }
-
-       final EventListenerGroup<PostInsertEventListener> listenerGroup = listenerGroup( EventType.POST_INSERT );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostInsertEvent event = new PostInsertEvent(
+       if ( isDelayed ) {
+           eventSource().getPersistenceContextInternal().replaceDelayedEntityIdentityInsertKeys( delayedEntityKey, generatedId );
+       }
+       getFastSessionServices()
+               .eventListenerGroup_POST_INSERT
+               .fireLazyEventOnEachListener( this::newPostInsertEvent, PostInsertEventListener::onPostInsert );
+   }
+
+   PostInsertEvent newPostInsertEvent() {
+       return new PostInsertEvent(
                getInstance(),
                generatedId,
                getState(),
                getPersister(),
-               eventSource
+               eventSource()
        );
-       for ( PostInsertEventListener listener : listenerGroup.listeners() ) {
-           listener.onPostInsert( event );
-       }
    }

    protected void postCommitInsert(boolean success) {
-       final EventListenerGroup<PostInsertEventListener> listenerGroup = listenerGroup( EventType.POST_COMMIT_INSERT );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostInsertEvent event = new PostInsertEvent(
-               getInstance(),
-               generatedId,
-               getState(),
-               getPersister(),
-               eventSource()
-       );
-       for ( PostInsertEventListener listener : listenerGroup.listeners() ) {
-           if ( listener instanceof PostCommitInsertEventListener ) {
-               if ( success ) {
-                   listener.onPostInsert( event );
-               }
-               else {
-                   ((PostCommitInsertEventListener) listener).onPostInsertCommitFailed( event );
-               }
-           }
-           else {
-               //default to the legacy implementation that always fires the event
-               listener.onPostInsert( event );
-           }
-       }
-   }
+       getFastSessionServices()
+               .eventListenerGroup_POST_COMMIT_INSERT
+               .fireLazyEventOnEachListener( this::newPostInsertEvent, success ? PostInsertEventListener::onPostInsert : this::postCommitInsertOnFailure );
+   }
+
+   private void postCommitInsertOnFailure(PostInsertEventListener listener, PostInsertEvent event) {
+       if ( listener instanceof PostCommitInsertEventListener ) {
+           ((PostCommitInsertEventListener) listener).onPostInsertCommitFailed( event );
+       }
+       else {
+           //default to the legacy implementation that always fires the event
+           listener.onPostInsert( event );
+       }
+   }

    protected boolean preInsert() {
-       final EventListenerGroup<PreInsertEventListener> listenerGroup = listenerGroup( EventType.PRE_INSERT );
+       final EventListenerGroup<PreInsertEventListener> listenerGroup = getFastSessionServices().eventListenerGroup_PRE_INSERT;
        if ( listenerGroup.isEmpty() ) {
            // NO_VETO
            return false;
@@ -168,54 +168,41 @@ public class EntityInsertAction extends AbstractEntityInsertAction {
    }

    protected void postInsert() {
-       final EventListenerGroup<PostInsertEventListener> listenerGroup = listenerGroup( EventType.POST_INSERT );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostInsertEvent event = new PostInsertEvent(
+       getFastSessionServices()
+               .eventListenerGroup_POST_INSERT
+               .fireLazyEventOnEachListener( this::newPostInsertEvent, PostInsertEventListener::onPostInsert );
+   }
+
+   private PostInsertEvent newPostInsertEvent() {
+       return new PostInsertEvent(
                getInstance(),
                getId(),
                getState(),
                getPersister(),
                eventSource()
        );
-       for ( PostInsertEventListener listener : listenerGroup.listeners() ) {
-           listener.onPostInsert( event );
-       }
    }

    protected void postCommitInsert(boolean success) {
-       final EventListenerGroup<PostInsertEventListener> listenerGroup = listenerGroup( EventType.POST_COMMIT_INSERT );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostInsertEvent event = new PostInsertEvent(
-               getInstance(),
-               getId(),
-               getState(),
-               getPersister(),
-               eventSource()
-       );
-       for ( PostInsertEventListener listener : listenerGroup.listeners() ) {
-           if ( PostCommitInsertEventListener.class.isInstance( listener ) ) {
-               if ( success ) {
-                   listener.onPostInsert( event );
-               }
-               else {
-                   ((PostCommitInsertEventListener) listener).onPostInsertCommitFailed( event );
-               }
-           }
-           else {
-               //default to the legacy implementation that always fires the event
-               listener.onPostInsert( event );
-           }
-       }
-   }
+       getFastSessionServices()
+               .eventListenerGroup_POST_COMMIT_INSERT
+               .fireLazyEventOnEachListener( this::newPostInsertEvent, success ? PostInsertEventListener::onPostInsert : this::postCommitOnFailure );
+   }
+
+   private void postCommitOnFailure(PostInsertEventListener listener, PostInsertEvent event) {
+       if ( listener instanceof PostCommitInsertEventListener ) {
+           ((PostCommitInsertEventListener) listener).onPostInsertCommitFailed( event );
+       }
+       else {
+           //default to the legacy implementation that always fires the event
+           listener.onPostInsert( event );
+       }
+   }

    protected boolean preInsert() {
        boolean veto = false;

-       final EventListenerGroup<PreInsertEventListener> listenerGroup = listenerGroup( EventType.PRE_INSERT );
+       final EventListenerGroup<PreInsertEventListener> listenerGroup = getFastSessionServices().eventListenerGroup_PRE_INSERT;
        if ( listenerGroup.isEmpty() ) {
            return veto;
        }
@@ -253,7 +253,7 @@ public class EntityUpdateAction extends EntityAction {

    protected boolean preUpdate() {
        boolean veto = false;
-       final EventListenerGroup<PreUpdateEventListener> listenerGroup = listenerGroup( EventType.PRE_UPDATE );
+       final EventListenerGroup<PreUpdateEventListener> listenerGroup = getFastSessionServices().eventListenerGroup_PRE_UPDATE;
        if ( listenerGroup.isEmpty() ) {
            return veto;
        }
@@ -272,11 +272,13 @@ public class EntityUpdateAction extends EntityAction {
    }

    protected void postUpdate() {
-       final EventListenerGroup<PostUpdateEventListener> listenerGroup = listenerGroup( EventType.POST_UPDATE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostUpdateEvent event = new PostUpdateEvent(
+       getFastSessionServices()
+               .eventListenerGroup_POST_UPDATE
+               .fireLazyEventOnEachListener( this::newPostUpdateEvent, PostUpdateEventListener::onPostUpdate );
+   }
+
+   private PostUpdateEvent newPostUpdateEvent() {
+       return new PostUpdateEvent(
                getInstance(),
                getId(),
                state,
@@ -285,44 +287,27 @@ public class EntityUpdateAction extends EntityAction {
                getPersister(),
                eventSource()
        );
-       for ( PostUpdateEventListener listener : listenerGroup.listeners() ) {
-           listener.onPostUpdate( event );
-       }
    }

    protected void postCommitUpdate(boolean success) {
-       final EventListenerGroup<PostUpdateEventListener> listenerGroup = listenerGroup( EventType.POST_COMMIT_UPDATE );
-       if ( listenerGroup.isEmpty() ) {
-           return;
-       }
-       final PostUpdateEvent event = new PostUpdateEvent(
-               getInstance(),
-               getId(),
-               state,
-               previousState,
-               dirtyFields,
-               getPersister(),
-               eventSource()
-       );
-       for ( PostUpdateEventListener listener : listenerGroup.listeners() ) {
-           if ( PostCommitUpdateEventListener.class.isInstance( listener ) ) {
-               if ( success ) {
-                   listener.onPostUpdate( event );
-               }
-               else {
-                   ((PostCommitUpdateEventListener) listener).onPostUpdateCommitFailed( event );
-               }
-           }
-           else {
-               //default to the legacy implementation that always fires the event
-               listener.onPostUpdate( event );
-           }
-       }
-   }
+       getFastSessionServices()
+               .eventListenerGroup_POST_COMMIT_UPDATE
+               .fireLazyEventOnEachListener( this::newPostUpdateEvent, success ? PostUpdateEventListener::onPostUpdate : this::onPostCommitFailure );
+   }
+
+   private void onPostCommitFailure(PostUpdateEventListener listener, PostUpdateEvent event) {
+       if ( listener instanceof PostCommitUpdateEventListener ) {
+           ((PostCommitUpdateEventListener) listener).onPostUpdateCommitFailed( event );
+       }
+       else {
+           //default to the legacy implementation that always fires the event
+           listener.onPostUpdate( event );
+       }
+   }

    @Override
    protected boolean hasPostCommitEventListeners() {
-       final EventListenerGroup<PostUpdateEventListener> group = listenerGroup( EventType.POST_COMMIT_UPDATE );
+       final EventListenerGroup<PostUpdateEventListener> group = getFastSessionServices().eventListenerGroup_POST_COMMIT_UPDATE;
        for ( PostUpdateEventListener listener : group.listeners() ) {
            if ( listener.requiresPostCommitHandling( getPersister() ) ) {
                return true;
@@ -35,6 +35,7 @@ public class EnhancementAsProxyLazinessInterceptor extends AbstractLazyLoadInter

    private final boolean inLineDirtyChecking;
    private Set<String> writtenFieldNames;
+   private Set<String> collectionAttributeNames;

    private Status status;

@@ -57,11 +58,22 @@ public class EnhancementAsProxyLazinessInterceptor extends AbstractLazyLoadInter
        this.entityKey = entityKey;

        final EntityPersister entityPersister = session.getFactory().getMetamodel().entityPersister( entityName );
+       if ( entityPersister.hasCollections() ) {
+           Type[] propertyTypes = entityPersister.getPropertyTypes();
+           collectionAttributeNames = new HashSet<>();
+           for ( int i = 0; i < propertyTypes.length; i++ ) {
+               Type propertyType = propertyTypes[i];
+               if ( propertyType.isCollectionType() ) {
+                   collectionAttributeNames.add( entityPersister.getPropertyNames()[i] );
+               }
+           }
+       }
+
        this.inLineDirtyChecking = entityPersister.getEntityMode() == EntityMode.POJO
                && SelfDirtinessTracker.class.isAssignableFrom( entityPersister.getMappedClass() );
        // if self-dirty tracking is enabled but DynamicUpdate is not enabled then we need to initialise the entity
        // because the pre-computed update statement contains even not dirty properties and so we need all the values
-       initializeBeforeWrite = !inLineDirtyChecking || !entityPersister.getEntityMetamodel().isDynamicUpdate();
+       initializeBeforeWrite = !( inLineDirtyChecking && entityPersister.getEntityMetamodel().isDynamicUpdate() );
        status = Status.UNINITIALIZED;
    }

@@ -245,7 +257,8 @@ public class EnhancementAsProxyLazinessInterceptor extends AbstractLazyLoadInter
            return newValue;
        }

-       if ( initializeBeforeWrite ) {
+       if ( initializeBeforeWrite
+               || ( collectionAttributeNames != null && collectionAttributeNames.contains( attributeName ) ) ) {
            // we need to force-initialize the proxy - the fetch group to which the `attributeName` belongs
            try {
                forceInitialize( target, attributeName );
@@ -267,6 +280,8 @@ public class EnhancementAsProxyLazinessInterceptor extends AbstractLazyLoadInter
                writtenFieldNames = new HashSet<>();
            }
            writtenFieldNames.add( attributeName );
+
+           ( (SelfDirtinessTracker) target ).$$_hibernate_trackChange( attributeName );
        }

        return newValue;
@@ -323,6 +338,10 @@ public class EnhancementAsProxyLazinessInterceptor extends AbstractLazyLoadInter
        status = Status.INITIALIZED;
    }

+   public boolean hasWrittenFieldNames() {
+       return writtenFieldNames != null && writtenFieldNames.size() != 0;
+   }
+
    private enum Status {
        UNINITIALIZED,
        INITIALIZING,
@@ -149,9 +149,8 @@ public class StandardCacheEntryImpl implements CacheEntry {

        final EventListenerGroup<PreLoadEventListener> listenerGroup = session
                .getFactory()
-               .getServiceRegistry()
-               .getService( EventListenerRegistry.class )
-               .getEventListenerGroup( EventType.PRE_LOAD );
+               .getFastSessionServices()
+               .eventListenerGroup_PRE_LOAD;
        for ( PreLoadEventListener listener : listenerGroup.listeners() ) {
            listener.onPreLoad( preLoadEvent );
        }
@@ -132,6 +132,11 @@ public class HANAColumnStoreDialect extends AbstractHANADialect {
            protected String getTruncateIdTableCommand() {
                return "truncate table";
            }
+
+           @Override
+           public String getCreateOptions() {
+               return "on commit delete rows";
+           }
        },
        AfterUseAction.CLEAN,
        runtimeModelCreationContext.getSessionFactory()
@@ -50,6 +50,11 @@ public class HANARowStoreDialect extends AbstractHANADialect {
            protected String getCreateCommand() {
                return "create global temporary row table";
            }
+
+           @Override
+           public String getCreateOptions() {
+               return "on commit delete rows";
+           }
        },
        AfterUseAction.CLEAN,
        runtimeModelCreationContext.getSessionFactory()
@@ -328,8 +328,12 @@ public abstract class AbstractEntityEntry implements Serializable, EntityEntry {
            final PersistentAttributeInterceptor interceptor = interceptable.$$_hibernate_getInterceptor();
            if ( interceptor instanceof EnhancementAsProxyLazinessInterceptor ) {
                EnhancementAsProxyLazinessInterceptor enhancementAsProxyLazinessInterceptor = (EnhancementAsProxyLazinessInterceptor) interceptor;
+               if ( enhancementAsProxyLazinessInterceptor.hasWrittenFieldNames() ) {
+                   return false;
+               }
                // When a proxy has dirty attributes, we have to treat it like a normal entity to flush changes
-               uninitializedProxy = !enhancementAsProxyLazinessInterceptor.isInitialized() && !( (SelfDirtinessTracker) entity ).$$_hibernate_hasDirtyAttributes();
+               return !enhancementAsProxyLazinessInterceptor.isInitialized()
+                       || !persister.hasCollections() && !( (SelfDirtinessTracker) entity ).$$_hibernate_hasDirtyAttributes();
            }
        }
        else if ( entity instanceof HibernateProxy ) {
@@ -119,9 +119,8 @@ public final class TwoPhaseLoad {
            final PreLoadEvent preLoadEvent) {
        final EventListenerGroup<PreLoadEventListener> listenerGroup = session
                .getFactory()
-               .getServiceRegistry()
-               .getService( EventListenerRegistry.class )
-               .getEventListenerGroup( EventType.PRE_LOAD );
+               .getFastSessionServices()
+               .eventListenerGroup_PRE_LOAD;
        final Iterable<PreLoadEventListener> listeners = listenerGroup.listeners();
        initializeEntity( entity, readOnly, session, preLoadEvent, listeners, EntityResolver.DEFAULT );
    }
@@ -28,6 +28,7 @@ import org.hibernate.engine.spi.EntityEntry;
 import org.hibernate.engine.spi.PersistenceContext;
 import org.hibernate.engine.spi.SessionImplementor;
 import org.hibernate.engine.spi.Status;
+import org.hibernate.event.service.spi.EventListenerGroup;
 import org.hibernate.event.service.spi.EventListenerRegistry;
 import org.hibernate.event.service.spi.JpaBootstrapSensitive;
 import org.hibernate.event.spi.EventSource;
@@ -205,10 +206,8 @@ public abstract class AbstractFlushingEventListener implements JpaBootstrapSensi
        LOG.trace( "Flushing entities and processing referenced collections" );

        final EventSource source = event.getSession();
-       final Iterable<FlushEntityEventListener> flushListeners = source.getFactory().getServiceRegistry()
-               .getService( EventListenerRegistry.class )
-               .getEventListenerGroup( EventType.FLUSH_ENTITY )
-               .listeners();
+       final EventListenerGroup<FlushEntityEventListener> flushListeners = source.getFactory()
+               .getFastSessionServices().eventListenerGroup_FLUSH_ENTITY;

        // Among other things, updateReachables() will recursively load all
        // collections that are moving roles. This might cause entities to
|
@ -228,9 +227,7 @@ public abstract class AbstractFlushingEventListener implements JpaBootstrapSensi
|
||||||
|
|
||||||
if ( status != Status.LOADING && status != Status.GONE ) {
|
if ( status != Status.LOADING && status != Status.GONE ) {
|
||||||
final FlushEntityEvent entityEvent = new FlushEntityEvent( source, me.getKey(), entry );
|
final FlushEntityEvent entityEvent = new FlushEntityEvent( source, me.getKey(), entry );
|
||||||
for ( FlushEntityEventListener listener : flushListeners ) {
|
flushListeners.fireEventOnEachListener( entityEvent, FlushEntityEventListener::onFlushEntity );
|
||||||
listener.onFlushEntity( entityEvent );
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -79,7 +79,7 @@ public interface EventListenerGroup<T> extends Serializable {
     * Fires an event on each registered event listener of this group.
     *
     * Implementation note (performance):
-    * the first argument is a supplier so that events can avoid allocation when no listener is registered.
+    * the first argument is a supplier so that events can avoid being created when no listener is registered.
     * the second argument is specifically designed to avoid needing a capturing lambda.
     *
     * @param eventSupplier
@@ -99,6 +99,16 @@ public interface EventListenerGroup<T> extends Serializable {
    @Incubating
    <U> void fireEventOnEachListener(final U event, final BiConsumer<T,U> actionOnEvent);

+   /**
+    * Similar to {@link #fireEventOnEachListener(Object, BiConsumer)}, but allows passing a third parameter
+    * to the consumer; our code based occasionally needs a third parameter: having this additional variant
+    * allows using the optimal iteration more extensively and reduce allocations.
+    * @param event
+    * @param param
+    * @param actionOnEvent
+    * @param <U>
+    * @param <X>
+    */
    @Incubating
    <U,X> void fireEventOnEachListener(final U event, X param, final EventActionWithParameter<T,U,X> actionOnEvent);

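The new three-argument fireEventOnEachListener exists so callers can pass extra state explicitly instead of capturing it in a lambda (a capturing lambda allocates a new object on each call). A rough, self-contained illustration of the shape of that API, with made-up types rather than Hibernate's EventActionWithParameter:

    import java.util.List;

    // Hypothetical tri-consumer mirroring the extra-parameter variant described above.
    interface ActionWithParameter<L, E, P> {
        void apply(L listener, E event, P param);
    }

    final class ParameterizedGroup<L> {
        private final List<L> listeners;

        ParameterizedGroup(List<L> listeners) {
            this.listeners = listeners;
        }

        // The caller passes `param` explicitly, so the action can be a constant
        // (non-capturing) lambda or method reference instead of capturing `param`.
        <E, P> void fireEventOnEachListener(E event, P param, ActionWithParameter<L, E, P> action) {
            for ( L listener : listeners ) {
                action.apply( listener, event, param );
            }
        }
    }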
@@ -31,14 +31,8 @@ public class PooledLoThreadLocalOptimizer extends AbstractOptimizer {
            PooledLoOptimizer.class.getName()
    );

-   private static class GenerationState {
-       // last value read from db source
-       private IntegralDataTypeHolder lastSourceValue;
-       // the current generator value
-       private IntegralDataTypeHolder value;
-       // the value at which we'll hit the db again
-       private IntegralDataTypeHolder upperLimitValue;
-   }
+   private final ThreadLocal<GenerationState> singleTenantState = ThreadLocal.withInitial( GenerationState::new );
+   private final ThreadLocal<Map<String, GenerationState>> multiTenantStates = ThreadLocal.withInitial( HashMap::new );

    /**
     * Constructs a PooledLoThreadLocalOptimizer.
@ -56,50 +50,20 @@ public class PooledLoThreadLocalOptimizer extends AbstractOptimizer {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Serializable generate(AccessCallback callback) {
|
public Serializable generate(AccessCallback callback) {
|
||||||
if ( callback.getTenantIdentifier() == null ) {
|
return locateGenerationState( callback.getTenantIdentifier() )
|
||||||
final GenerationState local = localAssignedIds.get();
|
.generate( callback, incrementSize );
|
||||||
if ( local.value != null && local.value.lt( local.upperLimitValue ) ) {
|
|
||||||
return local.value.makeValueThenIncrement();
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
synchronized (this) {
|
|
||||||
final GenerationState generationState = locateGenerationState( callback.getTenantIdentifier() );
|
|
||||||
|
|
||||||
if ( generationState.lastSourceValue == null
|
|
||||||
|| !generationState.value.lt( generationState.upperLimitValue )) {
|
|
||||||
generationState.lastSourceValue = callback.getNextValue();
|
|
||||||
generationState.upperLimitValue = generationState.lastSourceValue.copy().add( incrementSize );
|
|
||||||
generationState.value = generationState.lastSourceValue.copy();
|
|
||||||
// handle cases where initial-value is less than one (hsqldb for instance).
|
|
||||||
while (generationState.value.lt( 1 )) {
|
|
||||||
generationState.value.increment();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return generationState.value.makeValueThenIncrement();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private Map<String, GenerationState> tenantSpecificState;
|
|
||||||
private final ThreadLocal<GenerationState> localAssignedIds = ThreadLocal.withInitial( GenerationState::new );
|
|
||||||
|
|
||||||
private GenerationState locateGenerationState(String tenantIdentifier) {
|
private GenerationState locateGenerationState(String tenantIdentifier) {
|
||||||
if ( tenantIdentifier == null ) {
|
if ( tenantIdentifier == null ) {
|
||||||
return localAssignedIds.get();
|
return singleTenantState.get();
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
GenerationState state;
|
Map<String, GenerationState> states = multiTenantStates.get();
|
||||||
if ( tenantSpecificState == null ) {
|
GenerationState state = states.get( tenantIdentifier );
|
||||||
tenantSpecificState = new HashMap<>();
|
|
||||||
state = new GenerationState();
|
|
||||||
tenantSpecificState.put( tenantIdentifier, state );
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
state = tenantSpecificState.get( tenantIdentifier );
|
|
||||||
if ( state == null ) {
|
if ( state == null ) {
|
||||||
state = new GenerationState();
|
state = new GenerationState();
|
||||||
tenantSpecificState.put( tenantIdentifier, state );
|
states.put( tenantIdentifier, state );
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return state;
|
return state;
|
||||||
}
|
}
|
||||||
|
@ -125,4 +89,26 @@ public class PooledLoThreadLocalOptimizer extends AbstractOptimizer {
|
||||||
public boolean applyIncrementSizeToSourceValues() {
|
public boolean applyIncrementSizeToSourceValues() {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static class GenerationState {
|
||||||
|
// last value read from db source
|
||||||
|
private IntegralDataTypeHolder lastSourceValue;
|
||||||
|
// the current generator value
|
||||||
|
private IntegralDataTypeHolder value;
|
||||||
|
// the value at which we'll hit the db again
|
||||||
|
private IntegralDataTypeHolder upperLimitValue;
|
||||||
|
|
||||||
|
private Serializable generate(AccessCallback callback, int incrementSize) {
|
||||||
|
if ( value == null || !value.lt( upperLimitValue ) ) {
|
||||||
|
lastSourceValue = callback.getNextValue();
|
||||||
|
upperLimitValue = lastSourceValue.copy().add( incrementSize );
|
||||||
|
value = lastSourceValue.copy();
|
||||||
|
// handle cases where initial-value is less than one (hsqldb for instance).
|
||||||
|
while ( value.lt( 1 ) ) {
|
||||||
|
value.increment();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return value.makeValueThenIncrement();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
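The refactoring above replaces the synchronized, lazily built tenant map with thread-local state. A simplified sketch of that layout, with illustrative names rather than Hibernate's own: each thread owns a single-tenant slot plus its own tenant-to-state map, so lookups need no synchronization.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

// Illustrative sketch, not Hibernate code: per-thread state split into a
// single-tenant slot and a per-tenant map, so no locking is required.
final class PerTenantThreadLocalState<S> {

	private final Supplier<S> stateFactory;
	private final ThreadLocal<S> singleTenantState;
	private final ThreadLocal<Map<String, S>> multiTenantStates =
			ThreadLocal.withInitial( HashMap::new );

	PerTenantThreadLocalState(Supplier<S> stateFactory) {
		this.stateFactory = stateFactory;
		this.singleTenantState = ThreadLocal.withInitial( stateFactory );
	}

	S locate(String tenantIdentifier) {
		if ( tenantIdentifier == null ) {
			// No tenant: one state instance per thread.
			return singleTenantState.get();
		}
		// Multi-tenant: one map per thread, one state per tenant within it.
		return multiTenantStates.get().computeIfAbsent( tenantIdentifier, id -> stateFactory.get() );
	}
}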
@ -33,14 +33,28 @@ import org.hibernate.event.spi.DeleteEventListener;
|
||||||
import org.hibernate.event.spi.DirtyCheckEventListener;
|
import org.hibernate.event.spi.DirtyCheckEventListener;
|
||||||
import org.hibernate.event.spi.EventType;
|
import org.hibernate.event.spi.EventType;
|
||||||
import org.hibernate.event.spi.EvictEventListener;
|
import org.hibernate.event.spi.EvictEventListener;
|
||||||
|
import org.hibernate.event.spi.FlushEntityEventListener;
|
||||||
import org.hibernate.event.spi.FlushEventListener;
|
import org.hibernate.event.spi.FlushEventListener;
|
||||||
import org.hibernate.event.spi.InitializeCollectionEventListener;
|
import org.hibernate.event.spi.InitializeCollectionEventListener;
|
||||||
import org.hibernate.event.spi.LoadEventListener;
|
import org.hibernate.event.spi.LoadEventListener;
|
||||||
import org.hibernate.event.spi.LockEventListener;
|
import org.hibernate.event.spi.LockEventListener;
|
||||||
import org.hibernate.event.spi.MergeEventListener;
|
import org.hibernate.event.spi.MergeEventListener;
|
||||||
import org.hibernate.event.spi.PersistEventListener;
|
import org.hibernate.event.spi.PersistEventListener;
|
||||||
|
import org.hibernate.event.spi.PostCollectionRecreateEventListener;
|
||||||
|
import org.hibernate.event.spi.PostCollectionRemoveEventListener;
|
||||||
|
import org.hibernate.event.spi.PostCollectionUpdateEventListener;
|
||||||
|
import org.hibernate.event.spi.PostDeleteEventListener;
|
||||||
|
import org.hibernate.event.spi.PostInsertEventListener;
|
||||||
import org.hibernate.event.spi.PostLoadEvent;
|
import org.hibernate.event.spi.PostLoadEvent;
|
||||||
import org.hibernate.event.spi.PostLoadEventListener;
|
import org.hibernate.event.spi.PostLoadEventListener;
|
||||||
|
import org.hibernate.event.spi.PostUpdateEventListener;
|
||||||
|
import org.hibernate.event.spi.PreCollectionRecreateEventListener;
|
||||||
|
import org.hibernate.event.spi.PreCollectionRemoveEventListener;
|
||||||
|
import org.hibernate.event.spi.PreCollectionUpdateEventListener;
|
||||||
|
import org.hibernate.event.spi.PreDeleteEventListener;
|
||||||
|
import org.hibernate.event.spi.PreInsertEventListener;
|
||||||
|
import org.hibernate.event.spi.PreLoadEventListener;
|
||||||
|
import org.hibernate.event.spi.PreUpdateEventListener;
|
||||||
import org.hibernate.event.spi.RefreshEventListener;
|
import org.hibernate.event.spi.RefreshEventListener;
|
||||||
import org.hibernate.event.spi.ReplicateEventListener;
|
import org.hibernate.event.spi.ReplicateEventListener;
|
||||||
import org.hibernate.event.spi.ResolveNaturalIdEventListener;
|
import org.hibernate.event.spi.ResolveNaturalIdEventListener;
|
||||||
|
@ -86,28 +100,44 @@ public final class FastSessionServices {
|
||||||
*/
|
*/
|
||||||
final Map<String, Object> defaultSessionProperties;
|
final Map<String, Object> defaultSessionProperties;
|
||||||
|
|
||||||
// All session events need to be iterated frequently:
|
// All session events need to be iterated frequently; CollectionAction and EventAction also need
|
||||||
final EventListenerGroup<AutoFlushEventListener> eventListenerGroup_AUTO_FLUSH;
|
// most of these very frequently:
|
||||||
final EventListenerGroup<ClearEventListener> eventListenerGroup_CLEAR;
|
public final EventListenerGroup<AutoFlushEventListener> eventListenerGroup_AUTO_FLUSH;
|
||||||
final EventListenerGroup<DeleteEventListener> eventListenerGroup_DELETE;
|
public final EventListenerGroup<ClearEventListener> eventListenerGroup_CLEAR;
|
||||||
final EventListenerGroup<DirtyCheckEventListener> eventListenerGroup_DIRTY_CHECK;
|
public final EventListenerGroup<DeleteEventListener> eventListenerGroup_DELETE;
|
||||||
final EventListenerGroup<EvictEventListener> eventListenerGroup_EVICT;
|
public final EventListenerGroup<DirtyCheckEventListener> eventListenerGroup_DIRTY_CHECK;
|
||||||
final EventListenerGroup<FlushEventListener> eventListenerGroup_FLUSH;
|
public final EventListenerGroup<EvictEventListener> eventListenerGroup_EVICT;
|
||||||
final EventListenerGroup<InitializeCollectionEventListener> eventListenerGroup_INIT_COLLECTION;
|
public final EventListenerGroup<FlushEntityEventListener> eventListenerGroup_FLUSH_ENTITY;
|
||||||
final EventListenerGroup<LoadEventListener> eventListenerGroup_LOAD;
|
public final EventListenerGroup<FlushEventListener> eventListenerGroup_FLUSH;
|
||||||
final EventListenerGroup<LockEventListener> eventListenerGroup_LOCK;
|
public final EventListenerGroup<InitializeCollectionEventListener> eventListenerGroup_INIT_COLLECTION;
|
||||||
final EventListenerGroup<MergeEventListener> eventListenerGroup_MERGE;
|
public final EventListenerGroup<LoadEventListener> eventListenerGroup_LOAD;
|
||||||
final EventListenerGroup<PersistEventListener> eventListenerGroup_PERSIST;
|
public final EventListenerGroup<LockEventListener> eventListenerGroup_LOCK;
|
||||||
final EventListenerGroup<PersistEventListener> eventListenerGroup_PERSIST_ONFLUSH;
|
public final EventListenerGroup<MergeEventListener> eventListenerGroup_MERGE;
|
||||||
final EventListenerGroup<RefreshEventListener> eventListenerGroup_REFRESH;
|
public final EventListenerGroup<PersistEventListener> eventListenerGroup_PERSIST;
|
||||||
final EventListenerGroup<ReplicateEventListener> eventListenerGroup_REPLICATE;
|
public final EventListenerGroup<PersistEventListener> eventListenerGroup_PERSIST_ONFLUSH;
|
||||||
final EventListenerGroup<ResolveNaturalIdEventListener> eventListenerGroup_RESOLVE_NATURAL_ID;
|
public final EventListenerGroup<PostCollectionRecreateEventListener> eventListenerGroup_POST_COLLECTION_RECREATE;
|
||||||
final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_SAVE;
|
public final EventListenerGroup<PostCollectionRemoveEventListener> eventListenerGroup_POST_COLLECTION_REMOVE;
|
||||||
final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_SAVE_UPDATE;
|
public final EventListenerGroup<PostCollectionUpdateEventListener> eventListenerGroup_POST_COLLECTION_UPDATE;
|
||||||
final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_UPDATE;
|
public final EventListenerGroup<PostDeleteEventListener> eventListenerGroup_POST_COMMIT_DELETE;
|
||||||
|
public final EventListenerGroup<PostDeleteEventListener> eventListenerGroup_POST_DELETE;
|
||||||
//Frequently used by 2LC initialization:
|
public final EventListenerGroup<PostInsertEventListener> eventListenerGroup_POST_COMMIT_INSERT;
|
||||||
final EventListenerGroup<PostLoadEventListener> eventListenerGroup_POST_LOAD;
|
public final EventListenerGroup<PostInsertEventListener> eventListenerGroup_POST_INSERT;
|
||||||
|
public final EventListenerGroup<PostLoadEventListener> eventListenerGroup_POST_LOAD; //Frequently used by 2LC initialization
|
||||||
|
public final EventListenerGroup<PostUpdateEventListener> eventListenerGroup_POST_COMMIT_UPDATE;
|
||||||
|
public final EventListenerGroup<PostUpdateEventListener> eventListenerGroup_POST_UPDATE;
|
||||||
|
public final EventListenerGroup<PreCollectionRecreateEventListener> eventListenerGroup_PRE_COLLECTION_RECREATE;
|
||||||
|
public final EventListenerGroup<PreCollectionRemoveEventListener> eventListenerGroup_PRE_COLLECTION_REMOVE;
|
||||||
|
public final EventListenerGroup<PreCollectionUpdateEventListener> eventListenerGroup_PRE_COLLECTION_UPDATE;
|
||||||
|
public final EventListenerGroup<PreDeleteEventListener> eventListenerGroup_PRE_DELETE;
|
||||||
|
public final EventListenerGroup<PreInsertEventListener> eventListenerGroup_PRE_INSERT;
|
||||||
|
public final EventListenerGroup<PreLoadEventListener> eventListenerGroup_PRE_LOAD;
|
||||||
|
public final EventListenerGroup<PreUpdateEventListener> eventListenerGroup_PRE_UPDATE;
|
||||||
|
public final EventListenerGroup<RefreshEventListener> eventListenerGroup_REFRESH;
|
||||||
|
public final EventListenerGroup<ReplicateEventListener> eventListenerGroup_REPLICATE;
|
||||||
|
public final EventListenerGroup<ResolveNaturalIdEventListener> eventListenerGroup_RESOLVE_NATURAL_ID;
|
||||||
|
public final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_SAVE;
|
||||||
|
public final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_SAVE_UPDATE;
|
||||||
|
public final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_UPDATE;
|
||||||
|
|
||||||
//Intentionally Package private:
|
//Intentionally Package private:
|
||||||
final boolean disallowOutOfTransactionUpdateOperations;
|
final boolean disallowOutOfTransactionUpdateOperations;
|
||||||
|
@ -145,19 +175,36 @@ public final class FastSessionServices {
|
||||||
this.eventListenerGroup_DIRTY_CHECK = listeners( eventListenerRegistry, EventType.DIRTY_CHECK );
|
this.eventListenerGroup_DIRTY_CHECK = listeners( eventListenerRegistry, EventType.DIRTY_CHECK );
|
||||||
this.eventListenerGroup_EVICT = listeners( eventListenerRegistry, EventType.EVICT );
|
this.eventListenerGroup_EVICT = listeners( eventListenerRegistry, EventType.EVICT );
|
||||||
this.eventListenerGroup_FLUSH = listeners( eventListenerRegistry, EventType.FLUSH );
|
this.eventListenerGroup_FLUSH = listeners( eventListenerRegistry, EventType.FLUSH );
|
||||||
|
this.eventListenerGroup_FLUSH_ENTITY = listeners( eventListenerRegistry, EventType.FLUSH_ENTITY );
|
||||||
this.eventListenerGroup_INIT_COLLECTION = listeners( eventListenerRegistry, EventType.INIT_COLLECTION );
|
this.eventListenerGroup_INIT_COLLECTION = listeners( eventListenerRegistry, EventType.INIT_COLLECTION );
|
||||||
this.eventListenerGroup_LOAD = listeners( eventListenerRegistry, EventType.LOAD );
|
this.eventListenerGroup_LOAD = listeners( eventListenerRegistry, EventType.LOAD );
|
||||||
this.eventListenerGroup_LOCK = listeners( eventListenerRegistry, EventType.LOCK );
|
this.eventListenerGroup_LOCK = listeners( eventListenerRegistry, EventType.LOCK );
|
||||||
this.eventListenerGroup_MERGE = listeners( eventListenerRegistry, EventType.MERGE );
|
this.eventListenerGroup_MERGE = listeners( eventListenerRegistry, EventType.MERGE );
|
||||||
this.eventListenerGroup_PERSIST = listeners( eventListenerRegistry, EventType.PERSIST );
|
this.eventListenerGroup_PERSIST = listeners( eventListenerRegistry, EventType.PERSIST );
|
||||||
this.eventListenerGroup_PERSIST_ONFLUSH = listeners( eventListenerRegistry, EventType.PERSIST_ONFLUSH );
|
this.eventListenerGroup_PERSIST_ONFLUSH = listeners( eventListenerRegistry, EventType.PERSIST_ONFLUSH );
|
||||||
|
this.eventListenerGroup_POST_COLLECTION_RECREATE = listeners( eventListenerRegistry, EventType.POST_COLLECTION_RECREATE );
|
||||||
|
this.eventListenerGroup_POST_COLLECTION_REMOVE = listeners( eventListenerRegistry, EventType.POST_COLLECTION_REMOVE );
|
||||||
|
this.eventListenerGroup_POST_COLLECTION_UPDATE = listeners( eventListenerRegistry, EventType.POST_COLLECTION_UPDATE );
|
||||||
|
this.eventListenerGroup_POST_COMMIT_DELETE = listeners( eventListenerRegistry, EventType.POST_COMMIT_DELETE );
|
||||||
|
this.eventListenerGroup_POST_COMMIT_INSERT = listeners( eventListenerRegistry, EventType.POST_COMMIT_INSERT );
|
||||||
|
this.eventListenerGroup_POST_COMMIT_UPDATE = listeners( eventListenerRegistry, EventType.POST_COMMIT_UPDATE );
|
||||||
|
this.eventListenerGroup_POST_DELETE = listeners( eventListenerRegistry, EventType.POST_DELETE );
|
||||||
|
this.eventListenerGroup_POST_INSERT = listeners( eventListenerRegistry, EventType.POST_INSERT );
|
||||||
|
this.eventListenerGroup_POST_LOAD = listeners( eventListenerRegistry, EventType.POST_LOAD );
|
||||||
|
this.eventListenerGroup_POST_UPDATE = listeners( eventListenerRegistry, EventType.POST_UPDATE );
|
||||||
|
this.eventListenerGroup_PRE_COLLECTION_RECREATE = listeners( eventListenerRegistry, EventType.PRE_COLLECTION_RECREATE );
|
||||||
|
this.eventListenerGroup_PRE_COLLECTION_REMOVE = listeners( eventListenerRegistry, EventType.PRE_COLLECTION_REMOVE );
|
||||||
|
this.eventListenerGroup_PRE_COLLECTION_UPDATE = listeners( eventListenerRegistry, EventType.PRE_COLLECTION_UPDATE );
|
||||||
|
this.eventListenerGroup_PRE_DELETE = listeners( eventListenerRegistry, EventType.PRE_DELETE );
|
||||||
|
this.eventListenerGroup_PRE_INSERT = listeners( eventListenerRegistry, EventType.PRE_INSERT );
|
||||||
|
this.eventListenerGroup_PRE_LOAD = listeners( eventListenerRegistry, EventType.PRE_LOAD );
|
||||||
|
this.eventListenerGroup_PRE_UPDATE = listeners( eventListenerRegistry, EventType.PRE_UPDATE );
|
||||||
this.eventListenerGroup_REFRESH = listeners( eventListenerRegistry, EventType.REFRESH );
|
this.eventListenerGroup_REFRESH = listeners( eventListenerRegistry, EventType.REFRESH );
|
||||||
this.eventListenerGroup_REPLICATE = listeners( eventListenerRegistry, EventType.REPLICATE );
|
this.eventListenerGroup_REPLICATE = listeners( eventListenerRegistry, EventType.REPLICATE );
|
||||||
this.eventListenerGroup_RESOLVE_NATURAL_ID = listeners( eventListenerRegistry, EventType.RESOLVE_NATURAL_ID );
|
this.eventListenerGroup_RESOLVE_NATURAL_ID = listeners( eventListenerRegistry, EventType.RESOLVE_NATURAL_ID );
|
||||||
this.eventListenerGroup_SAVE = listeners( eventListenerRegistry, EventType.SAVE );
|
this.eventListenerGroup_SAVE = listeners( eventListenerRegistry, EventType.SAVE );
|
||||||
this.eventListenerGroup_SAVE_UPDATE = listeners( eventListenerRegistry, EventType.SAVE_UPDATE );
|
this.eventListenerGroup_SAVE_UPDATE = listeners( eventListenerRegistry, EventType.SAVE_UPDATE );
|
||||||
this.eventListenerGroup_UPDATE = listeners( eventListenerRegistry, EventType.UPDATE );
|
this.eventListenerGroup_UPDATE = listeners( eventListenerRegistry, EventType.UPDATE );
|
||||||
this.eventListenerGroup_POST_LOAD = listeners( eventListenerRegistry, EventType.POST_LOAD );
|
|
||||||
|
|
||||||
//Other highly useful constants:
|
//Other highly useful constants:
|
||||||
this.dialect = jdbcServices.getJdbcEnvironment().getDialect();
|
this.dialect = jdbcServices.getJdbcEnvironment().getDialect();
|
||||||
|
@ -183,6 +230,7 @@ public final class FastSessionServices {
|
||||||
this.defaultSessionEventListeners = sessionFactoryOptions.getBaselineSessionEventsListenerBuilder();
|
this.defaultSessionEventListeners = sessionFactoryOptions.getBaselineSessionEventsListenerBuilder();
|
||||||
this.defaultLockOptions = initializeDefaultLockOptions( defaultSessionProperties );
|
this.defaultLockOptions = initializeDefaultLockOptions( defaultSessionProperties );
|
||||||
this.initialSessionFlushMode = initializeDefaultFlushMode( defaultSessionProperties );
|
this.initialSessionFlushMode = initializeDefaultFlushMode( defaultSessionProperties );
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static FlushMode initializeDefaultFlushMode(Map<String, Object> defaultSessionProperties) {
|
private static FlushMode initializeDefaultFlushMode(Map<String, Object> defaultSessionProperties) {
|
||||||
|
|
|
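The visibility change above (package-private fields becoming public) supports a simple caching pattern: each listener group is resolved once when the session factory is built, then read as a final field on hot paths instead of going through the service registry per call. A minimal sketch of that idea, using placeholder types rather than Hibernate's EventListenerRegistry and EventListenerGroup:

// Placeholder types for illustration only.
interface DemoGroup {
	void fireEventOnEachListener(Object event);
}

interface DemoRegistry {
	DemoGroup getEventListenerGroup(String eventType);
}

// Resolve each group once, at construction time; afterwards a dispatch site
// reads a final field instead of performing a registry lookup on every event.
final class DemoFastServices {

	public final DemoGroup eventListenerGroup_PRE_LOAD;
	public final DemoGroup eventListenerGroup_POST_LOAD;

	DemoFastServices(DemoRegistry registry) {
		this.eventListenerGroup_PRE_LOAD = registry.getEventListenerGroup( "PRE_LOAD" );
		this.eventListenerGroup_POST_LOAD = registry.getEventListenerGroup( "POST_LOAD" );
	}
}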
@ -48,7 +48,7 @@ public interface QueryProducer {
|
||||||
* Create a typed {@link Query} instance for the given HQL/JPQL query string.
|
* Create a typed {@link Query} instance for the given HQL/JPQL query string.
|
||||||
*
|
*
|
||||||
* @param queryString The HQL/JPQL query
|
* @param queryString The HQL/JPQL query
|
||||||
*
|
* @param resultClass The type of the query result
|
||||||
* @return The Query instance for manipulation and execution
|
* @return The Query instance for manipulation and execution
|
||||||
*
|
*
|
||||||
* @see javax.persistence.EntityManager#createQuery(String,Class)
|
* @see javax.persistence.EntityManager#createQuery(String,Class)
|
||||||
|
@ -105,7 +105,7 @@ public interface QueryProducer {
|
||||||
* implicit mapping to the specified Java type.
|
* implicit mapping to the specified Java type.
|
||||||
*
|
*
|
||||||
* @param sqlString Native (SQL) query string
|
* @param sqlString Native (SQL) query string
|
||||||
* @param resultClass The Java type to map results to
|
* @param resultClass The Java entity type to map results to
|
||||||
*
|
*
|
||||||
* @return The NativeQuery instance for manipulation and execution
|
* @return The NativeQuery instance for manipulation and execution
|
||||||
*
|
*
|
||||||
|
|
|
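For context, a brief usage sketch of the two factory methods whose Javadoc is touched above; Book is a hypothetical mapped entity and the query strings are only illustrative.

import java.util.ArrayList;
import java.util.List;

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.Session;

// Hypothetical entity, declared only so the example is self-contained.
@Entity
class Book {
	@Id
	Long id;
	String title;
}

class QueryProducerUsage {

	// The result class parameter yields a typed Query/NativeQuery, so no casts
	// are needed on the results.
	static List<Book> findBooks(Session session) {
		List<Book> results = new ArrayList<>();

		results.addAll(
				session.createQuery( "from Book b where b.title like :t", Book.class )
						.setParameter( "t", "%Hibernate%" )
						.list()
		);

		results.addAll(
				session.createNativeQuery( "select * from Book", Book.class )
						.list()
		);

		return results;
	}
}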
@ -803,9 +803,8 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
|
||||||
.setPersister( concreteDescriptor );
|
.setPersister( concreteDescriptor );
|
||||||
|
|
||||||
final EventListenerGroup<PreLoadEventListener> listenerGroup = session.getFactory()
|
final EventListenerGroup<PreLoadEventListener> listenerGroup = session.getFactory()
|
||||||
.getServiceRegistry()
|
.getFastSessionServices()
|
||||||
.getService( EventListenerRegistry.class )
|
.eventListenerGroup_PRE_LOAD;
|
||||||
.getEventListenerGroup( EventType.PRE_LOAD );
|
|
||||||
for ( PreLoadEventListener listener : listenerGroup.listeners() ) {
|
for ( PreLoadEventListener listener : listenerGroup.listeners() ) {
|
||||||
listener.onPreLoad( preLoadEvent );
|
listener.onPreLoad( preLoadEvent );
|
||||||
}
|
}
|
||||||
|
|
|
@ -186,9 +186,8 @@ public class JdbcValuesSourceProcessingStateStandardImpl implements JdbcValuesSo
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
final EventListenerGroup<PostLoadEventListener> listenerGroup = executionContext.getSession().getFactory()
|
final EventListenerGroup<PostLoadEventListener> listenerGroup = executionContext.getSession().getFactory()
|
||||||
.getServiceRegistry()
|
.getFastSessionServices()
|
||||||
.getService( EventListenerRegistry.class )
|
.eventListenerGroup_POST_LOAD;
|
||||||
.getEventListenerGroup( EventType.POST_LOAD );
|
|
||||||
|
|
||||||
loadingEntityMap.forEach(
|
loadingEntityMap.forEach(
|
||||||
(entityKey, loadingEntityEntry) -> {
|
(entityKey, loadingEntityEntry) -> {
|
||||||
|
|
|
@ -0,0 +1,188 @@
|
||||||
|
/*
|
||||||
|
* Hibernate, Relational Persistence for Idiomatic Java
|
||||||
|
*
|
||||||
|
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
|
||||||
|
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
|
||||||
|
*/
|
||||||
|
package org.hibernate.id.enhanced;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.LinkedHashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.concurrent.Callable;
|
||||||
|
import java.util.concurrent.ExecutionException;
|
||||||
|
import java.util.concurrent.ExecutorService;
|
||||||
|
import java.util.concurrent.Executors;
|
||||||
|
import java.util.concurrent.Future;
|
||||||
|
import java.util.concurrent.TimeUnit;
|
||||||
|
import java.util.concurrent.TimeoutException;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
import org.hibernate.AssertionFailure;
|
||||||
|
|
||||||
|
import org.hibernate.testing.junit4.BaseUnitTestCase;
|
||||||
|
import org.hibernate.testing.junit4.CustomParameterized;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.runner.RunWith;
|
||||||
|
import org.junit.runners.Parameterized;
|
||||||
|
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
import static org.assertj.core.api.Assertions.assertThatCode;
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
|
||||||
|
@RunWith(CustomParameterized.class)
|
||||||
|
public class OptimizerConcurrencyUnitTest extends BaseUnitTestCase {
|
||||||
|
|
||||||
|
@Parameterized.Parameters(name = "{0}")
|
||||||
|
public static List<Object[]> params() {
|
||||||
|
List<Object[]> params = new ArrayList<>();
|
||||||
|
for ( StandardOptimizerDescriptor value : StandardOptimizerDescriptor.values() ) {
|
||||||
|
params.add( new Object[] { value } );
|
||||||
|
}
|
||||||
|
return params;
|
||||||
|
}
|
||||||
|
|
||||||
|
private final StandardOptimizerDescriptor optimizerDescriptor;
|
||||||
|
|
||||||
|
public OptimizerConcurrencyUnitTest(StandardOptimizerDescriptor optimizerDescriptor) {
|
||||||
|
this.optimizerDescriptor = optimizerDescriptor;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testConcurrentUsage_singleTenancy() throws InterruptedException {
|
||||||
|
final int increment = 50;
|
||||||
|
final int taskCount = 100 * increment;
|
||||||
|
|
||||||
|
Optimizer optimizer = buildOptimizer( 1, increment );
|
||||||
|
|
||||||
|
List<Callable<Long>> tasks = new ArrayList<>();
|
||||||
|
|
||||||
|
SourceMock sequence = new SourceMock( 1, increment );
|
||||||
|
assertEquals( 0, sequence.getTimesCalled() );
|
||||||
|
assertEquals( -1, sequence.getCurrentValue() );
|
||||||
|
|
||||||
|
for ( int i = 0; i < taskCount; i++ ) {
|
||||||
|
tasks.add( new Callable<Long>() {
|
||||||
|
@Override
|
||||||
|
public Long call() throws Exception {
|
||||||
|
return ( Long ) optimizer.generate( sequence );
|
||||||
|
}
|
||||||
|
} );
|
||||||
|
}
|
||||||
|
|
||||||
|
ExecutorService executor = Executors.newFixedThreadPool( 10 );
|
||||||
|
List<Future<Long>> futures;
|
||||||
|
try {
|
||||||
|
futures = executor.invokeAll( tasks );
|
||||||
|
executor.shutdown();
|
||||||
|
executor.awaitTermination( 10, TimeUnit.SECONDS );
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
executor.shutdownNow();
|
||||||
|
}
|
||||||
|
|
||||||
|
assertThat( futures )
|
||||||
|
.allSatisfy( future -> {
|
||||||
|
assertThat( future ).isDone();
|
||||||
|
assertThatCode( future::get ).doesNotThrowAnyException();
|
||||||
|
} );
|
||||||
|
List<Long> generated = futures.stream().map( this::getDoneFutureValue ).collect( Collectors.toList());
|
||||||
|
assertThat( generated )
|
||||||
|
.hasSize( taskCount )
|
||||||
|
// Check for uniqueness
|
||||||
|
.containsExactlyInAnyOrderElementsOf( new HashSet<>( generated ) );
|
||||||
|
System.out.println( "Generated IDs: " + generated );
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testConcurrentUsage_multiTenancy() throws InterruptedException {
|
||||||
|
final int increment = 50;
|
||||||
|
|
||||||
|
final int tenantCount = 5;
|
||||||
|
final int taskCountPerTenant = 20 * increment;
|
||||||
|
|
||||||
|
Optimizer optimizer = buildOptimizer( 1, increment );
|
||||||
|
|
||||||
|
Map<String, List<Callable<Long>>> tasksByTenantId = new LinkedHashMap<>();
|
||||||
|
|
||||||
|
for ( int i = 0; i < tenantCount; i++ ) {
|
||||||
|
String tenantId = "tenant#" + i;
|
||||||
|
|
||||||
|
SourceMock sequenceForTenant = new SourceMock( tenantId, 1, increment );
|
||||||
|
assertEquals( 0, sequenceForTenant.getTimesCalled() );
|
||||||
|
assertEquals( -1, sequenceForTenant.getCurrentValue() );
|
||||||
|
|
||||||
|
List<Callable<Long>> tasksForTenant = new ArrayList<>();
|
||||||
|
tasksByTenantId.put( tenantId, tasksForTenant );
|
||||||
|
for ( int j = 0; j < taskCountPerTenant; j++ ) {
|
||||||
|
tasksForTenant.add( new Callable<Long>() {
|
||||||
|
@Override
|
||||||
|
public Long call() throws Exception {
|
||||||
|
return ( Long ) optimizer.generate( sequenceForTenant );
|
||||||
|
}
|
||||||
|
} );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
List<Callable<Long>> tasks = new ArrayList<>();
|
||||||
|
// Make sure to interleave tenants
|
||||||
|
for ( int i = 0; i < taskCountPerTenant; i++ ) {
|
||||||
|
for ( List<Callable<Long>> tasksForTenant : tasksByTenantId.values() ) {
|
||||||
|
tasks.add( tasksForTenant.get( i ) );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ExecutorService executor = Executors.newFixedThreadPool( 10 );
|
||||||
|
List<Future<Long>> futures;
|
||||||
|
try {
|
||||||
|
futures = executor.invokeAll( tasks );
|
||||||
|
executor.shutdown();
|
||||||
|
executor.awaitTermination( 10, TimeUnit.SECONDS );
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
executor.shutdownNow();
|
||||||
|
}
|
||||||
|
|
||||||
|
assertThat( futures )
|
||||||
|
.allSatisfy( future -> {
|
||||||
|
assertThat( future ).isDone();
|
||||||
|
assertThatCode( future::get ).doesNotThrowAnyException();
|
||||||
|
} );
|
||||||
|
|
||||||
|
Map<String, List<Future<Long>>> futuresByTenantId = new LinkedHashMap<>();
|
||||||
|
for ( int i = 0; i < tenantCount; i++ ) {
|
||||||
|
List<Future<Long>> futuresForTenant = new ArrayList<>();
|
||||||
|
for ( int j = 0; j < taskCountPerTenant; j++ ) {
|
||||||
|
futuresForTenant.add( futures.get( i + j * tenantCount ) );
|
||||||
|
}
|
||||||
|
String tenantId = "tenant#" + i;
|
||||||
|
futuresByTenantId.put( tenantId, futuresForTenant );
|
||||||
|
}
|
||||||
|
|
||||||
|
for ( Map.Entry<String, List<Future<Long>>> entry : futuresByTenantId.entrySet() ) {
|
||||||
|
List<Long> generated = entry.getValue().stream().map( this::getDoneFutureValue )
|
||||||
|
.collect( Collectors.toList());
|
||||||
|
assertThat( generated )
|
||||||
|
.hasSize( taskCountPerTenant )
|
||||||
|
// Check for uniqueness
|
||||||
|
.containsExactlyInAnyOrderElementsOf( new HashSet<>( generated ) );
|
||||||
|
System.out.println( "Generated IDs for '" + entry.getKey() + "': " + generated );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private Optimizer buildOptimizer(long initial, int increment) {
|
||||||
|
return OptimizerFactory.buildOptimizer( optimizerDescriptor.getExternalName(), Long.class, increment, initial );
|
||||||
|
}
|
||||||
|
|
||||||
|
private <R> R getDoneFutureValue(Future<R> future) {
|
||||||
|
try {
|
||||||
|
return future.get(0, TimeUnit.SECONDS);
|
||||||
|
}
|
||||||
|
catch (InterruptedException | ExecutionException | TimeoutException e) {
|
||||||
|
throw new AssertionFailure( "Unexpected Future state", e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,79 @@
|
||||||
|
/*
|
||||||
|
* Hibernate, Relational Persistence for Idiomatic Java
|
||||||
|
*
|
||||||
|
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
|
||||||
|
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
|
||||||
|
*/
|
||||||
|
package org.hibernate.id.enhanced;
|
||||||
|
|
||||||
|
import org.hibernate.id.IdentifierGeneratorHelper;
|
||||||
|
import org.hibernate.id.IntegralDataTypeHolder;
|
||||||
|
|
||||||
|
class SourceMock implements AccessCallback {
|
||||||
|
private final String tenantId;
|
||||||
|
private final long initialValue;
|
||||||
|
private final int increment;
|
||||||
|
private volatile long currentValue;
|
||||||
|
private volatile int timesCalled;
|
||||||
|
|
||||||
|
public SourceMock(long initialValue) {
|
||||||
|
this( initialValue, 1 );
|
||||||
|
}
|
||||||
|
|
||||||
|
public SourceMock(long initialValue, int increment) {
|
||||||
|
this( null, initialValue, increment );
|
||||||
|
}
|
||||||
|
|
||||||
|
public SourceMock(String tenantId, long initialValue, int increment) {
|
||||||
|
this( tenantId, initialValue, increment, 0 );
|
||||||
|
}
|
||||||
|
|
||||||
|
public SourceMock(long initialValue, int increment, int timesCalled) {
|
||||||
|
this( null, initialValue, increment, timesCalled );
|
||||||
|
}
|
||||||
|
|
||||||
|
public SourceMock(String tenantId, long initialValue, int increment, int timesCalled) {
|
||||||
|
this.tenantId = tenantId;
|
||||||
|
this.increment = increment;
|
||||||
|
this.timesCalled = timesCalled;
|
||||||
|
if ( timesCalled != 0 ) {
|
||||||
|
this.currentValue = initialValue;
|
||||||
|
this.initialValue = 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.currentValue = -1;
|
||||||
|
this.initialValue = initialValue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public synchronized IntegralDataTypeHolder getNextValue() {
|
||||||
|
try {
|
||||||
|
if ( timesCalled == 0 ) {
|
||||||
|
currentValue = initialValue;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
currentValue += increment;
|
||||||
|
}
|
||||||
|
IdentifierGeneratorHelper.BasicHolder result = new IdentifierGeneratorHelper.BasicHolder( Long.class );
|
||||||
|
result.initialize( currentValue );
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
++timesCalled;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getTenantIdentifier() {
|
||||||
|
return tenantId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public int getTimesCalled() {
|
||||||
|
return timesCalled;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getCurrentValue() {
|
||||||
|
return currentValue;
|
||||||
|
}
|
||||||
|
}
|
|
@ -228,9 +228,9 @@ public abstract class BaseEntityManagerFunctionalTestCase extends BaseUnitTestCa
|
||||||
config.put( AvailableSettings.XML_FILE_NAMES, dds );
|
config.put( AvailableSettings.XML_FILE_NAMES, dds );
|
||||||
}
|
}
|
||||||
|
|
||||||
if ( !config.containsKey( Environment.CONNECTION_PROVIDER ) ) {
|
|
||||||
config.put( GlobalTemporaryTableStrategy.DROP_ID_TABLES, "true" );
|
config.put( GlobalTemporaryTableStrategy.DROP_ID_TABLES, "true" );
|
||||||
config.put( LocalTemporaryTableStrategy.DROP_ID_TABLES, "true" );
|
config.put( LocalTemporaryTableStrategy.DROP_ID_TABLES, "true" );
|
||||||
|
if ( !config.containsKey( Environment.CONNECTION_PROVIDER ) ) {
|
||||||
config.put(
|
config.put(
|
||||||
AvailableSettings.CONNECTION_PROVIDER,
|
AvailableSettings.CONNECTION_PROVIDER,
|
||||||
SharedDriverManagerConnectionProviderImpl.getInstance()
|
SharedDriverManagerConnectionProviderImpl.getInstance()
|
||||||
|
|
|
@ -61,8 +61,7 @@ public class LockTest extends BaseEntityManagerFunctionalTestCase {
|
||||||
@Override
|
@Override
|
||||||
protected void addConfigOptions(Map options) {
|
protected void addConfigOptions(Map options) {
|
||||||
super.addConfigOptions( options );
|
super.addConfigOptions( options );
|
||||||
// Looks like Oracle Connections that experience a timeout produce different errors when they timeout again?!
|
// We can't use a shared connection provider if we use TransactionUtil.setJdbcTimeout because that is set on the connection level
|
||||||
SharedDriverManagerConnectionProviderImpl.getInstance().reset();
|
|
||||||
options.remove( org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER );
|
options.remove( org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER );
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -44,7 +44,7 @@ public class StatementIsClosedAfterALockExceptionTest extends BaseEntityManagerF
|
||||||
@Override
|
@Override
|
||||||
protected Map getConfig() {
|
protected Map getConfig() {
|
||||||
Map config = super.getConfig();
|
Map config = super.getConfig();
|
||||||
CONNECTION_PROVIDER.setConnectionProvider( (ConnectionProvider) config.get( AvailableSettings.CONNECTION_PROVIDER ) );
|
// We can't use a shared connection provider if we use TransactionUtil.setJdbcTimeout because that is set on the connection level
|
||||||
config.put(
|
config.put(
|
||||||
org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER,
|
org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER,
|
||||||
CONNECTION_PROVIDER
|
CONNECTION_PROVIDER
|
||||||
|
|
|
@ -0,0 +1,28 @@
|
||||||
|
/*
|
||||||
|
* Hibernate, Relational Persistence for Idiomatic Java
|
||||||
|
*
|
||||||
|
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
|
||||||
|
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
|
||||||
|
*/
|
||||||
|
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;
|
||||||
|
|
||||||
|
import java.util.Date;
|
||||||
|
import javax.persistence.GeneratedValue;
|
||||||
|
import javax.persistence.GenerationType;
|
||||||
|
import javax.persistence.Id;
|
||||||
|
import javax.persistence.MappedSuperclass;
|
||||||
|
import javax.persistence.Temporal;
|
||||||
|
import javax.persistence.TemporalType;
|
||||||
|
|
||||||
|
@MappedSuperclass
|
||||||
|
public class BaseEntity {
|
||||||
|
@GeneratedValue(
|
||||||
|
strategy = GenerationType.IDENTITY
|
||||||
|
)
|
||||||
|
@Id
|
||||||
|
private Long id;
|
||||||
|
|
||||||
|
@Temporal(TemporalType.TIMESTAMP)
|
||||||
|
private Date createdOn;
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,45 @@
|
||||||
|
/*
|
||||||
|
* Hibernate, Relational Persistence for Idiomatic Java
|
||||||
|
*
|
||||||
|
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
|
||||||
|
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
|
||||||
|
*/
|
||||||
|
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;
|
||||||
|
|
||||||
|
import javax.persistence.CascadeType;
|
||||||
|
import javax.persistence.Column;
|
||||||
|
import javax.persistence.Entity;
|
||||||
|
import javax.persistence.FetchType;
|
||||||
|
import javax.persistence.OneToOne;
|
||||||
|
import javax.persistence.Version;
|
||||||
|
|
||||||
|
@Entity(name = "Customer")
|
||||||
|
public class Customer extends BaseEntity {
|
||||||
|
|
||||||
|
@Version
|
||||||
|
@Column(name = "version")
|
||||||
|
private int version;
|
||||||
|
|
||||||
|
@OneToOne(optional = false, fetch = FetchType.LAZY, cascade = {
|
||||||
|
CascadeType.PERSIST,
|
||||||
|
CascadeType.MERGE,
|
||||||
|
CascadeType.REMOVE
|
||||||
|
})
|
||||||
|
private User user;
|
||||||
|
|
||||||
|
public int getVersion() {
|
||||||
|
return version;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setVersion(int version) {
|
||||||
|
this.version = version;
|
||||||
|
}
|
||||||
|
|
||||||
|
public User getUser() {
|
||||||
|
return user;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setUser(User user) {
|
||||||
|
this.user = user;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,252 @@
|
||||||
|
/*
|
||||||
|
* Hibernate, Relational Persistence for Idiomatic Java
|
||||||
|
*
|
||||||
|
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
|
||||||
|
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
|
||||||
|
*/
|
||||||
|
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;
|
||||||
|
|
||||||
|
import java.util.Date;
|
||||||
|
import javax.persistence.Column;
|
||||||
|
import javax.persistence.Entity;
|
||||||
|
import javax.persistence.Id;
|
||||||
|
import javax.persistence.ManyToOne;
|
||||||
|
import javax.persistence.Table;
|
||||||
|
import javax.persistence.Temporal;
|
||||||
|
import javax.persistence.TemporalType;
|
||||||
|
import javax.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
import org.hibernate.boot.MetadataSources;
|
||||||
|
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
|
||||||
|
import org.hibernate.bytecode.enhance.spi.interceptor.EnhancementAsProxyLazinessInterceptor;
|
||||||
|
import org.hibernate.cfg.AvailableSettings;
|
||||||
|
import org.hibernate.cfg.Environment;
|
||||||
|
import org.hibernate.engine.spi.PersistentAttributeInterceptable;
|
||||||
|
|
||||||
|
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
|
||||||
|
import org.hibernate.testing.bytecode.enhancement.CustomEnhancementContext;
|
||||||
|
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
|
||||||
|
import org.junit.After;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.runner.RunWith;
|
||||||
|
|
||||||
|
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||||
|
import static org.hamcrest.CoreMatchers.is;
|
||||||
|
import static org.hamcrest.MatcherAssert.assertThat;
|
||||||
|
|
||||||
|
@RunWith(BytecodeEnhancerRunner.class)
|
||||||
|
@CustomEnhancementContext({ NoDirtyCheckEnhancementContext.class, DirtyCheckEnhancementContext.class })
|
||||||
|
public class EntityWithMutableAttributesTest extends BaseNonConfigCoreFunctionalTestCase {
|
||||||
|
|
||||||
|
boolean skipTest;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void configureStandardServiceRegistryBuilder(StandardServiceRegistryBuilder ssrb) {
|
||||||
|
super.configureStandardServiceRegistryBuilder( ssrb );
|
||||||
|
ssrb.applySetting( AvailableSettings.ALLOW_ENHANCEMENT_AS_PROXY, "true" );
|
||||||
|
ssrb.applySetting( AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, "100" );
|
||||||
|
ssrb.applySetting( AvailableSettings.GENERATE_STATISTICS, "true" );
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void applyMetadataSources(MetadataSources sources) {
|
||||||
|
String byteCodeProvider = Environment.getProperties().getProperty( AvailableSettings.BYTECODE_PROVIDER );
|
||||||
|
if ( byteCodeProvider != null && !Environment.BYTECODE_PROVIDER_NAME_BYTEBUDDY.equals( byteCodeProvider ) ) {
|
||||||
|
// skip the test if the bytecode provider is Javassist
|
||||||
|
skipTest = true;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
sources.addAnnotatedClass( User.class );
|
||||||
|
sources.addAnnotatedClass( Role.class );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Before
|
||||||
|
public void setUp() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
User user = new User();
|
||||||
|
user.setId( 1 );
|
||||||
|
user.setDate( new Date() );
|
||||||
|
user.setEmail( "not null string" );
|
||||||
|
|
||||||
|
|
||||||
|
Role role = new Role();
|
||||||
|
role.setId( 2 );
|
||||||
|
role.setDate( new Date() );
|
||||||
|
role.setName( "manager" );
|
||||||
|
|
||||||
|
user.setRole( role );
|
||||||
|
|
||||||
|
session.save( role );
|
||||||
|
session.save( user );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@After
|
||||||
|
public void tearDown() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
session.createQuery( "delete from User" ).executeUpdate();
|
||||||
|
session.createQuery( "delete from Role" ).executeUpdate();
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testLoad() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
User user = session.load( User.class, 1 );
|
||||||
|
assertThat(
|
||||||
|
user, instanceOf( PersistentAttributeInterceptable.class )
|
||||||
|
);
|
||||||
|
final PersistentAttributeInterceptable interceptable = (PersistentAttributeInterceptable) user;
|
||||||
|
assertThat(
|
||||||
|
interceptable.$$_hibernate_getInterceptor(),
|
||||||
|
instanceOf( EnhancementAsProxyLazinessInterceptor.class )
|
||||||
|
);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testMutableAttributeIsUpdated() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
User user = session.load( User.class, 1 );
|
||||||
|
user.getDate().setTime( 0 );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
User user = session.getReference( User.class, 1 );
|
||||||
|
assertThat( user.getDate().getTime(), is( 0L ) );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Entity(name = "User")
|
||||||
|
@Table(name = "appuser")
|
||||||
|
public static class User {
|
||||||
|
@Id
|
||||||
|
private Integer id;
|
||||||
|
|
||||||
|
@NotNull
|
||||||
|
private String email;
|
||||||
|
|
||||||
|
private String name;
|
||||||
|
|
||||||
|
@Temporal(TemporalType.TIMESTAMP)
|
||||||
|
@Column(name = "t_date")
|
||||||
|
private Date date;
|
||||||
|
|
||||||
|
@ManyToOne
|
||||||
|
private Role role;
|
||||||
|
|
||||||
|
public Integer getId() {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setId(Integer id) {
|
||||||
|
this.id = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getEmail() {
|
||||||
|
return email;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setEmail(String email) {
|
||||||
|
this.email = email;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setName(String name) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Role getRole() {
|
||||||
|
return role;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setRole(Role role) {
|
||||||
|
this.role = role;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date getDate() {
|
||||||
|
return date;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setDate(Date date) {
|
||||||
|
this.date = date;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Entity(name = "Role")
|
||||||
|
@Table(name = "approle")
|
||||||
|
public static class Role {
|
||||||
|
@Id
|
||||||
|
private Integer id;
|
||||||
|
|
||||||
|
@NotNull
|
||||||
|
private String name;
|
||||||
|
|
||||||
|
@Temporal(TemporalType.TIMESTAMP)
|
||||||
|
@Column(name = "t_date")
|
||||||
|
private Date date;
|
||||||
|
|
||||||
|
private String description;
|
||||||
|
|
||||||
|
public Integer getId() {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setId(Integer id) {
|
||||||
|
this.id = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setName(String name) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getDescription() {
|
||||||
|
return description;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setDescription(String description) {
|
||||||
|
this.description = description;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date getDate() {
|
||||||
|
return date;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setDate(Date date) {
|
||||||
|
this.date = date;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,269 @@
|
||||||
|
/*
|
||||||
|
* Hibernate, Relational Persistence for Idiomatic Java
|
||||||
|
*
|
||||||
|
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
|
||||||
|
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
|
||||||
|
*/
|
||||||
|
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;
|
||||||
|
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import javax.persistence.TypedQuery;
|
||||||
|
import javax.persistence.criteria.CriteriaBuilder;
|
||||||
|
import javax.persistence.criteria.CriteriaQuery;
|
||||||
|
import javax.persistence.criteria.Root;
|
||||||
|
|
||||||
|
import org.hibernate.boot.MetadataSources;
|
||||||
|
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
|
||||||
|
import org.hibernate.cfg.AvailableSettings;
|
||||||
|
import org.hibernate.cfg.Environment;
|
||||||
|
import org.hibernate.engine.spi.SessionImplementor;
|
||||||
|
|
||||||
|
import org.hibernate.testing.TestForIssue;
|
||||||
|
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
|
||||||
|
import org.hibernate.testing.bytecode.enhancement.CustomEnhancementContext;
|
||||||
|
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
|
||||||
|
import org.junit.After;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.runner.RunWith;
|
||||||
|
|
||||||
|
import static org.hamcrest.CoreMatchers.is;
|
||||||
|
import static org.junit.Assert.assertThat;
|
||||||
|
|
||||||
|
@TestForIssue(jiraKey = "HHH-14424")
|
||||||
|
@RunWith(BytecodeEnhancerRunner.class)
|
||||||
|
@CustomEnhancementContext({ DirtyCheckEnhancementContext.class, NoDirtyCheckEnhancementContext.class })
|
||||||
|
public class LoadAndUpdateEntitiesWithCollectionsTest extends BaseNonConfigCoreFunctionalTestCase {
|
||||||
|
|
||||||
|
boolean skipTest;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void configureStandardServiceRegistryBuilder(StandardServiceRegistryBuilder ssrb) {
|
||||||
|
super.configureStandardServiceRegistryBuilder( ssrb );
|
||||||
|
ssrb.applySetting( AvailableSettings.ALLOW_ENHANCEMENT_AS_PROXY, "true" );
|
||||||
|
ssrb.applySetting( AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, "100" );
|
||||||
|
ssrb.applySetting( AvailableSettings.GENERATE_STATISTICS, "true" );
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void applyMetadataSources(MetadataSources sources) {
|
||||||
|
String byteCodeProvider = Environment.getProperties().getProperty( AvailableSettings.BYTECODE_PROVIDER );
|
||||||
|
if ( byteCodeProvider != null && !Environment.BYTECODE_PROVIDER_NAME_BYTEBUDDY.equals( byteCodeProvider ) ) {
|
||||||
|
// skip the test if the bytecode provider is Javassist
|
||||||
|
skipTest = true;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
sources.addAnnotatedClass( SamplingOrder.class );
|
||||||
|
sources.addAnnotatedClass( Customer.class );
|
||||||
|
sources.addAnnotatedClass( User.class );
|
||||||
|
sources.addAnnotatedClass( Role.class );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Before
|
||||||
|
public void setUp() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
User user = new User();
|
||||||
|
user.setEmail( "foo@bar.com" );
|
||||||
|
|
||||||
|
Role role = new Role();
|
||||||
|
role.setName( "admin" );
|
||||||
|
|
||||||
|
user.addRole( role );
|
||||||
|
|
||||||
|
Customer customer = new Customer();
|
||||||
|
customer.setUser( user );
|
||||||
|
|
||||||
|
SamplingOrder order = new SamplingOrder();
|
||||||
|
order.setNote( "it is a sample" );
|
||||||
|
order.setCustomer( customer );
|
||||||
|
|
||||||
|
|
||||||
|
session.save( user );
|
||||||
|
session.save( role );
|
||||||
|
session.save( customer );
|
||||||
|
session.save( order );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@After
|
||||||
|
public void tearDown() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
session.createQuery( "delete from SamplingOrder" ).executeUpdate();
|
||||||
|
session.createQuery( "delete from Customer" ).executeUpdate();
|
||||||
|
session.createQuery( "delete from User" ).executeUpdate();
|
||||||
|
session.createQuery( "delete from Role" ).executeUpdate();
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testLoad() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
CriteriaBuilder cb = session.getCriteriaBuilder();
|
||||||
|
CriteriaQuery<SamplingOrder> cq = cb.createQuery( SamplingOrder.class );
|
||||||
|
Root<SamplingOrder> root = cq.from( SamplingOrder.class );
|
||||||
|
root.fetch( SamplingOrder_.customer );
|
||||||
|
|
||||||
|
TypedQuery<SamplingOrder> query = session.createQuery( cq );
|
||||||
|
query.getResultList();
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
List<User> users = session.createQuery( "from User u", User.class ).list();
|
||||||
|
User user = users.get( 0 );
|
||||||
|
assertThat( user.getEmail(), is( "foo@bar.com" ) );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testAddUserRoles() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
SamplingOrder samplingOrder = getSamplingOrder( session );
|
||||||
|
User user = samplingOrder.getCustomer().getUser();
|
||||||
|
Role role = new Role();
|
||||||
|
role.setName( "superuser" );
|
||||||
|
user.addRole( role );
|
||||||
|
session.save( role );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
List<User> users = session.createQuery( "from User u", User.class ).list();
|
||||||
|
User user = users.get( 0 );
|
||||||
|
assertThat( user.getEmail(), is( "foo@bar.com" ) );
|
||||||
|
assertThat( user.getRoles().size(), is( 2 ) );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
SamplingOrder samplingOrder = getSamplingOrder( session );
|
||||||
|
User user = samplingOrder.getCustomer().getUser();
|
||||||
|
Role role = new Role();
|
||||||
|
role.setName( "user" );
|
||||||
|
user.getRoles().add( role );
|
||||||
|
session.save( role );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
List<User> users = session.createQuery( "from User u", User.class ).list();
|
||||||
|
User user = users.get( 0 );
|
||||||
|
assertThat( user.getEmail(), is( "foo@bar.com" ) );
|
||||||
|
assertThat( user.getRoles().size(), is( 3 ) );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
User user = session
|
||||||
|
.createQuery(
|
||||||
|
"from User",
|
||||||
|
User.class
|
||||||
|
)
|
||||||
|
.list()
|
||||||
|
.get( 0 );
|
||||||
|
Role role = new Role();
|
||||||
|
user.getRoles().add( role );
|
||||||
|
session.save( role );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
List<User> users = session
|
||||||
|
.createQuery(
|
||||||
|
"from User u",
|
||||||
|
User.class
|
||||||
|
)
|
||||||
|
.list();
|
||||||
|
User user = users
|
||||||
|
.get( 0 );
|
||||||
|
assertThat( user.getEmail(), is( "foo@bar.com" ) );
|
||||||
|
assertThat( user.getRoles().size(), is( 4 ) );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testDeleteUserRoles() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
SamplingOrder samplingOrder = getSamplingOrder( session );
|
||||||
|
User user = samplingOrder.getCustomer().getUser();
|
||||||
|
user.setRoles( new HashSet<>() );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
List<User> users = session.createQuery( "from User u", User.class ).list();
|
||||||
|
User user = users.get( 0 );
|
||||||
|
assertThat( user.getEmail(), is( "foo@bar.com" ) );
|
||||||
|
assertThat( user.getRoles().size(), is( 0 ) );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testModifyUserMail() {
|
||||||
|
if ( skipTest ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
SamplingOrder samplingOrder = getSamplingOrder( session );
|
||||||
|
User user = samplingOrder.getCustomer().getUser();
|
||||||
|
user.setEmail( "bar@foo.com" );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
inTransaction(
|
||||||
|
session -> {
|
||||||
|
List<User> users = session.createQuery( "from User u", User.class ).list();
|
||||||
|
User user = users.get( 0 );
|
||||||
|
assertThat( user.getEmail(), is( "bar@foo.com" ) );
|
||||||
|
assertThat( user.getRoles().size(), is( 1 ) );
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private SamplingOrder getSamplingOrder(SessionImplementor session) {
|
||||||
|
CriteriaBuilder cb = session.getCriteriaBuilder();
|
||||||
|
CriteriaQuery<SamplingOrder> cq = cb.createQuery( SamplingOrder.class );
|
||||||
|
Root<SamplingOrder> root = cq.from( SamplingOrder.class );
|
||||||
|
root.fetch( SamplingOrder_.customer );
|
||||||
|
|
||||||
|
TypedQuery<SamplingOrder> query = session.createQuery( cq );
|
||||||
|
List<SamplingOrder> resultList = query.getResultList();
|
||||||
|
return resultList.get( 0 );
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,42 @@
|
||||||
|
/*
|
||||||
|
* Hibernate, Relational Persistence for Idiomatic Java
|
||||||
|
*
|
||||||
|
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
|
||||||
|
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
|
||||||
|
*/
|
||||||
|
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;
|
||||||
|
|
||||||
|
import javax.persistence.Column;
|
||||||
|
import javax.persistence.Entity;
|
||||||
|
import javax.persistence.GeneratedValue;
|
||||||
|
import javax.persistence.GenerationType;
|
||||||
|
import javax.persistence.Id;
|
||||||
|
import javax.persistence.Table;
|
||||||
|
|
||||||
|
@Entity(name = "Role")
|
||||||
|
@Table(name = "approle")
|
||||||
|
public class Role {
|
||||||
|
|
||||||
|
@Id
|
||||||
|
@GeneratedValue(strategy = GenerationType.IDENTITY)
|
||||||
|
@Column(name = "id", updatable = false, nullable = false)
|
||||||
|
private Long id;
|
||||||
|
|
||||||
|
private String name;
|
||||||
|
|
||||||
|
public Long getId() {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setId(Long id) {
|
||||||
|
this.id = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setName(String name) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
}
|
|
@@ -0,0 +1,52 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;

@Entity(name = "SamplingOrder")
public class SamplingOrder {

	@Id
	@GeneratedValue
	private Long id;

	private String note;

	@ManyToOne(fetch = FetchType.LAZY)
	@JoinColumn(name = "customerId")
	private Customer customer;

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public String getNote() {
		return note;
	}

	public void setNote(String note) {
		this.note = note;
	}

	public Customer getCustomer() {
		return customer;
	}

	public void setCustomer(Customer customer) {
		this.customer = customer;
	}
}

@@ -0,0 +1,314 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;

import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;

import org.hibernate.annotations.DynamicUpdate;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.bytecode.enhance.spi.interceptor.EnhancementAsProxyLazinessInterceptor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.spi.PersistentAttributeInterceptable;

import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.bytecode.enhancement.CustomEnhancementContext;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;

@RunWith(BytecodeEnhancerRunner.class)
@CustomEnhancementContext({ NoDirtyCheckEnhancementContext.class, DirtyCheckEnhancementContext.class })
public class SimpleDynamicUpdateTest extends BaseNonConfigCoreFunctionalTestCase {

	boolean skipTest;

	@Override
	protected void configureStandardServiceRegistryBuilder(StandardServiceRegistryBuilder ssrb) {
		super.configureStandardServiceRegistryBuilder( ssrb );
		ssrb.applySetting( AvailableSettings.ALLOW_ENHANCEMENT_AS_PROXY, "true" );
		ssrb.applySetting( AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, "100" );
		ssrb.applySetting( AvailableSettings.GENERATE_STATISTICS, "true" );
	}

	@Override
	protected void applyMetadataSources(MetadataSources sources) {
		String byteCodeProvider = Environment.getProperties().getProperty( AvailableSettings.BYTECODE_PROVIDER );
		if ( byteCodeProvider != null && !Environment.BYTECODE_PROVIDER_NAME_BYTEBUDDY.equals( byteCodeProvider ) ) {
			// skip the test if the bytecode provider is Javassist
			skipTest = true;
		}
		else {
			sources.addAnnotatedClass( User.class );
			sources.addAnnotatedClass( Role.class );
		}
	}

	@Before
	public void setUp() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					User user = new User();
					user.setId( 1 );
					user.setEmail( "not null string" );

					Address address = new Address();
					address.setState( "Texas" );

					user.setAddress( address );

					Role role = new Role();
					role.setId( 2 );
					role.setName( "manager" );

					user.setRole( role );

					session.save( role );
					session.save( user );
				}
		);
	}

	@Test
	public void testIt() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					User user = session.getReference( User.class, 1 );
					assertThat(
							user, instanceOf( PersistentAttributeInterceptable.class )
					);
					final PersistentAttributeInterceptable interceptable = (PersistentAttributeInterceptable) user;
					assertThat(
							interceptable.$$_hibernate_getInterceptor(),
							instanceOf( EnhancementAsProxyLazinessInterceptor.class )
					);
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					entity.setName( "abc" );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					assertThat( entity.getName(), is( "abc" ) );
					assertThat( entity.getEmail(), is( "not null string" ) );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					entity.setRole( null );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					assertThat( entity.getName(), is( "abc" ) );
					assertThat( entity.getEmail(), is( "not null string" ) );
					assertThat( entity.getRole(), is( nullValue() ) );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					entity.setName( null );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					assertThat( entity.getName(), is( nullValue() ) );
					assertThat( entity.getEmail(), is( "not null string" ) );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					entity.setAddress( null );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					assertThat( entity.getName(), is( nullValue() ) );
					assertThat( entity.getEmail(), is( "not null string" ) );
					assertThat( entity.getRole(), is( nullValue() ) );
					assertThat( entity.getAddress(), is( nullValue() ) );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					Role role = new Role();
					role.setId( 3 );
					role.setName( "user" );
					entity.setRole( role );
					session.save( role );
				}
		);

		inTransaction(
				session -> {
					User entity = session.getReference( User.class, 1 );
					assertThat( entity.getName(), is( nullValue() ) );
					assertThat( entity.getEmail(), is( "not null string" ) );
					assertThat( entity.getRole(), is( notNullValue() ) );
					assertThat( entity.getAddress(), is( nullValue() ) );
				}
		);
	}

	@Entity(name = "User")
	@Table(name = "appuser")
	@DynamicUpdate
	public static class User {
		@Id
		private Integer id;

		@NotNull
		private String email;

		private String name;

		private Address address;

		@ManyToOne
		private Role role;

		public Integer getId() {
			return id;
		}

		public void setId(Integer id) {
			this.id = id;
		}

		public String getEmail() {
			return email;
		}

		public void setEmail(String email) {
			this.email = email;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}

		public Address getAddress() {
			return address;
		}

		public void setAddress(Address address) {
			this.address = address;
		}

		public Role getRole() {
			return role;
		}

		public void setRole(Role role) {
			this.role = role;
		}
	}

	@Entity(name = "Role")
	@Table(name = "approle")
	@DynamicUpdate
	public static class Role {
		@Id
		private Integer id;

		@NotNull
		private String name;

		private String description;

		public Integer getId() {
			return id;
		}

		public void setId(Integer id) {
			this.id = id;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}

		public String getDescription() {
			return description;
		}

		public void setDescription(String description) {
			this.description = description;
		}
	}

	@Embeddable
	public static class Address {
		private String street;
		private String state;

		public String getStreet() {
			return street;
		}

		public void setStreet(String street) {
			this.street = street;
		}

		public String getState() {
			return state;
		}

		public void setState(String state) {
			this.state = state;
		}
	}

}

@@ -0,0 +1,62 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking;

import java.util.HashSet;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;

@Entity(name = "User")
@Table(name = "appuser")
public class User extends BaseEntity {

	@Column(unique = true, nullable = false)
	@NotNull
	private String email;

	private String name;

	@ManyToMany(fetch = FetchType.LAZY)
	@JoinTable(name = "user_in_role", joinColumns = @JoinColumn(name = "userid"), inverseJoinColumns = @JoinColumn(name = "roleid"))
	public Set<Role> roles = new HashSet<>();

	public String getEmail() {
		return email;
	}

	public void setEmail(String email) {
		this.email = email;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public Set<Role> getRoles() {
		return roles;
	}

	public void setRoles(Set<Role> roles) {
		this.roles = roles;
	}

	public void addRole(Role role) {
		this.roles.add( role );
	}
}

@@ -0,0 +1,28 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.dynamicupdate;

import java.util.Date;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.MappedSuperclass;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

@MappedSuperclass
public class BaseEntity {
	@GeneratedValue(
			strategy = GenerationType.IDENTITY
	)
	@Id
	private Long id;

	@Temporal(TemporalType.TIMESTAMP)
	private Date createdOn;

}

@@ -0,0 +1,48 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.dynamicupdate;

import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.OneToOne;
import javax.persistence.Version;

import org.hibernate.annotations.DynamicUpdate;

@Entity(name = "Customer")
@DynamicUpdate
public class Customer extends BaseEntity {

	@Version
	@Column(name = "version")
	private int version;

	@OneToOne(optional = false, fetch = FetchType.LAZY, cascade = {
			CascadeType.PERSIST,
			CascadeType.MERGE,
			CascadeType.REMOVE
	})
	private User user;

	public int getVersion() {
		return version;
	}

	public void setVersion(int version) {
		this.version = version;
	}

	public User getUser() {
		return user;
	}

	public void setUser(User user) {
		this.user = user;
	}
}

@@ -0,0 +1,279 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.dynamicupdate;

import java.util.HashSet;
import java.util.List;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.spi.SessionImplementor;

import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.bytecode.enhancement.BytecodeEnhancerRunner;
import org.hibernate.testing.bytecode.enhancement.CustomEnhancementContext;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.DirtyCheckEnhancementContext;
import org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.NoDirtyCheckEnhancementContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;

@TestForIssue(jiraKey = "HHH14424")
@RunWith(BytecodeEnhancerRunner.class)
@CustomEnhancementContext({ NoDirtyCheckEnhancementContext.class, DirtyCheckEnhancementContext.class })
public class DynamicUpdateAndCollectionsTest extends BaseNonConfigCoreFunctionalTestCase {

	boolean skipTest;

	@Override
	protected void configureStandardServiceRegistryBuilder(StandardServiceRegistryBuilder ssrb) {
		super.configureStandardServiceRegistryBuilder( ssrb );
		ssrb.applySetting( AvailableSettings.ALLOW_ENHANCEMENT_AS_PROXY, "true" );
		ssrb.applySetting( AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, "100" );
		ssrb.applySetting( AvailableSettings.GENERATE_STATISTICS, "true" );
	}

	@Override
	protected void applyMetadataSources(MetadataSources sources) {
		String byteCodeProvider = Environment.getProperties().getProperty( AvailableSettings.BYTECODE_PROVIDER );
		if ( byteCodeProvider != null && !Environment.BYTECODE_PROVIDER_NAME_BYTEBUDDY.equals( byteCodeProvider ) ) {
			// skip the test if the bytecode provider is Javassist
			skipTest = true;
		}
		else {
			sources.addAnnotatedClass( SamplingOrder.class );
			sources.addAnnotatedClass( Customer.class );
			sources.addAnnotatedClass( User.class );
			sources.addAnnotatedClass( Role.class );
		}
	}

	@Before
	public void setUp() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					User user = new User();
					user.setEmail( "foo@bar.com" );

					Role role = new Role();
					role.setName( "admin" );

					user.addRole( role );

					Customer customer = new Customer();
					customer.setUser( user );

					SamplingOrder order = new SamplingOrder();
					order.setNote( "it is a sample" );
					order.setCustomer( customer );

					session.save( user );
					session.save( role );
					session.save( customer );
					session.save( order );
				}
		);
	}

	@After
	public void tearDown() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					session.createQuery( "delete from SamplingOrder" ).executeUpdate();
					session.createQuery( "delete from Customer" ).executeUpdate();
					session.createQuery( "delete from User" ).executeUpdate();
					session.createQuery( "delete from Role" ).executeUpdate();
				}
		);
	}

	@Test
	public void testLoad() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					CriteriaBuilder cb = session.getCriteriaBuilder();
					CriteriaQuery<SamplingOrder> cq = cb.createQuery( SamplingOrder.class );
					Root<SamplingOrder> root = cq.from( SamplingOrder.class );
					root.fetch( SamplingOrder_.customer );

					TypedQuery<SamplingOrder> query = session.createQuery( cq );
					query.getResultList();
				}
		);

		inTransaction(
				session -> {
					List<User> users = session.createQuery( "from User u", User.class ).list();
					User user = users.get( 0 );
					assertThat( user.getEmail(), is( "foo@bar.com" ) );
				}
		);
	}

	@Test
	public void testRemoveCustomers() {
		if ( skipTest ) {
			return;
		}
		Long samplingOrderId = fromTransaction(
				session -> {
					SamplingOrder samplingOrder = getSamplingOrderFetchCustomer( session );
					samplingOrder.setCustomer( null );
					return samplingOrder.getId();
				}
		);

		inTransaction(
				session -> {
					SamplingOrder samplingOrder = session.get( SamplingOrder.class, samplingOrderId );
					assertThat( samplingOrder.getCustomer(), is( nullValue() ) );
				}
		);
	}

	@Test
	public void testAddUserRoles() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					SamplingOrder samplingOrder = getSamplingOrderFetchCustomer( session );
					User user = samplingOrder.getCustomer().getUser();
					Role role = new Role();
					role.setName( "superuser" );
					user.addRole( role );
					session.save( role );
				}
		);

		inTransaction(
				session -> {
					List<User> users = session.createQuery( "from User u", User.class ).list();
					User user = users.get( 0 );
					assertThat( user.getEmail(), is( "foo@bar.com" ) );
					assertThat( user.getRoles().size(), is( 2 ) );
				}
		);

		inTransaction(
				session -> {
					SamplingOrder samplingOrder = getSamplingOrderFetchCustomer( session );
					User user = samplingOrder.getCustomer().getUser();
					Role role = new Role();
					user.getRoles().add( role );
					session.save( role );
				}
		);

		inTransaction(
				session -> {
					List<User> users = session.createQuery( "from User u", User.class ).list();
					User user = users.get( 0 );
					assertThat( user.getEmail(), is( "foo@bar.com" ) );
					assertThat( user.getRoles().size(), is( 3 ) );
				}
		);

		inTransaction(
				session -> {
					User user = session.createQuery( "from User", User.class ).list().get( 0 );
					Role role = new Role();
					user.getRoles().add( role );
					session.save( role );
				}
		);

		inTransaction(
				session -> {
					List<User> users = session.createQuery( "from User u", User.class ).list();
					User user = users.get( 0 );
					assertThat( user.getEmail(), is( "foo@bar.com" ) );
					assertThat( user.getRoles().size(), is( 4 ) );
				}
		);

	}

	@Test
	public void testDeleteUserRoles() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					SamplingOrder samplingOrder = getSamplingOrderFetchCustomer( session );
					User user = samplingOrder.getCustomer().getUser();
					user.setRoles( new HashSet<>() );
				}
		);

		inTransaction(
				session -> {
					List<User> users = session.createQuery( "from User u", User.class ).list();
					User user = users.get( 0 );
					assertThat( user.getRoles().size(), is( 0 ) );
				}
		);
	}

	@Test
	public void testModifyUserMail() {
		if ( skipTest ) {
			return;
		}
		inTransaction(
				session -> {
					SamplingOrder samplingOrder = getSamplingOrderFetchCustomer( session );
					User user = samplingOrder.getCustomer().getUser();
					user.setEmail( "bar@foo.com" );
				}
		);

		inTransaction(
				session -> {
					List<User> users = session.createQuery( "from User u", User.class ).list();
					User user = users.get( 0 );
					assertThat( user.getEmail(), is( "bar@foo.com" ) );
					assertThat( user.getRoles().size(), is( 1 ) );
				}
		);
	}

	private SamplingOrder getSamplingOrderFetchCustomer(SessionImplementor session) {
		CriteriaBuilder cb = session.getCriteriaBuilder();
		CriteriaQuery<SamplingOrder> cq = cb.createQuery( SamplingOrder.class );
		Root<SamplingOrder> root = cq.from( SamplingOrder.class );
		root.fetch( SamplingOrder_.customer );

		TypedQuery<SamplingOrder> query = session.createQuery( cq );
		List<SamplingOrder> resultList = query.getResultList();
		return resultList.get( 0 );
	}

}

@@ -0,0 +1,45 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.dynamicupdate;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;

import org.hibernate.annotations.DynamicUpdate;

@Entity(name = "Role")
@Table(name = "approle")
@DynamicUpdate
public class Role {

	@Id
	@GeneratedValue(strategy = GenerationType.IDENTITY)
	@Column(name = "id", updatable = false, nullable = false)
	private Long id;

	private String name;

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}
}

@@ -0,0 +1,60 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.dynamicupdate;

import java.util.List;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;

import org.hibernate.annotations.DynamicUpdate;

@Entity(name = "SamplingOrder")
@DynamicUpdate
public class SamplingOrder {

	@Id
	@GeneratedValue
	private Long id;

	private String note;

	@OneToMany
	private List<Customer> customers;

	@ManyToOne(fetch = FetchType.LAZY)
	@JoinColumn(name = "customerId")
	private Customer customer;

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public String getNote() {
		return note;
	}

	public void setNote(String note) {
		this.note = note;
	}

	public Customer getCustomer() {
		return customer;
	}

	public void setCustomer(Customer customer) {
		this.customer = customer;
	}
}

@@ -0,0 +1,65 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.bytecode.enhancement.lazy.proxy.inlinedirtychecking.dynamicupdate;

import java.util.HashSet;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;

import org.hibernate.annotations.DynamicUpdate;

@Entity(name = "User")
@Table(name = "appuser")
@DynamicUpdate
public class User extends BaseEntity {

	@Column(unique = true, nullable = false)
	@NotNull
	private String email;

	private String name;

	@ManyToMany(fetch = FetchType.LAZY)
	@JoinTable(name = "user_in_role", joinColumns = @JoinColumn(name = "userid"), inverseJoinColumns = @JoinColumn(name = "roleid"))
	public Set<Role> roles = new HashSet<>();

	public String getEmail() {
		return email;
	}

	public void setEmail(String email) {
		this.email = email;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public Set<Role> getRoles() {
		return roles;
	}

	public void setRoles(Set<Role> roles) {
		this.roles = roles;
	}

	public void addRole(Role role) {
		this.roles.add( role );
	}
}

@@ -16,6 +16,8 @@ import javax.persistence.criteria.CriteriaQuery;
 import org.hibernate.LockMode;
 import org.hibernate.LockOptions;
 import org.hibernate.Session;
+import org.hibernate.cfg.AvailableSettings;
+import org.hibernate.cfg.Configuration;
 import org.hibernate.dialect.CockroachDialect;
 import org.hibernate.dialect.SQLServerDialect;
 import org.hibernate.dialect.SybaseASE15Dialect;
@@ -55,6 +57,12 @@ public class LockModeTest extends BaseCoreFunctionalTestCase {
 		return new Class[] { A.class };
 	}
 
+	@Override
+	protected void configure(Configuration configuration) {
+		// We can't use a shared connection provider if we use TransactionUtil.setJdbcTimeout because that is set on the connection level
+		configuration.getProperties().remove( AvailableSettings.CONNECTION_PROVIDER );
+	}
+
 	@Override
 	public void prepareTest() throws Exception {
 		doInHibernate( this::sessionFactory, session -> {
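A note on the configure() override added above: its in-code comment is the whole rationale, so here is a minimal, purely illustrative JDBC sketch (class name and values are hypothetical, not from the patch) of why a timeout applied at the connection level cannot safely coexist with a connection provider that is shared across tests.

import java.sql.Connection;
import java.sql.Statement;

class SharedConnectionTimeoutSketch {
	// Illustrative only: a per-statement timeout dies with the statement,
	// but connection-level settings travel with the Connection object and
	// would leak into whatever test borrows the shared connection next.
	static void runWithTimeout(Connection sharedConnection, int seconds) throws Exception {
		try ( Statement st = sharedConnection.createStatement() ) {
			st.setQueryTimeout( seconds ); // scoped to this statement only
		}
		// connection-scoped: survives until someone explicitly resets it
		sharedConnection.setNetworkTimeout( Runnable::run, seconds * 1000 );
	}
}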
@@ -125,6 +125,7 @@
 		FROM ORGANIZATION org
 		LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER
 		WHERE org.ORGID=?
+		ORDER BY emp.EMPID
 	</sql-query>
@@ -35,7 +35,10 @@ public abstract class AbstractBulkCompositeIdTest extends BaseCoreFunctionalTest
 	@Override
 	protected Configuration constructConfiguration() {
 		Configuration configuration = super.constructConfiguration();
-		configuration.setProperty( AvailableSettings.QUERY_MULTI_TABLE_MUTATION_STRATEGY, getMultiTableBulkIdStrategyClass().getName() );
+		Class<? extends MultiTableBulkIdStrategy> strategyClass = getMultiTableBulkIdStrategyClass();
+		if ( strategyClass != null ) {
+			configuration.setProperty( AvailableSettings.QUERY_MULTI_TABLE_MUTATION_STRATEGY, strategyClass.getName() );
+		}
 		return configuration;
 	}
 
@@ -14,6 +14,8 @@ public class GlobalTemporaryTableBulkCompositeIdTest extends AbstractBulkComposi
 
 	@Override
 	protected Class<? extends MultiTableBulkIdStrategy> getMultiTableBulkIdStrategyClass() {
-		return GlobalTemporaryTableBulkIdStrategy.class;
+		// Since we only allow dialects that support global temporary tables, we avoid overriding the strategy.
+		// This is important because otherwise we would lose id table configurations that are made in the dialects.
+		return null;
 	}
 }
@@ -144,9 +144,9 @@ public abstract class BaseEnversJPAFunctionalTestCase extends AbstractEnversTest
 			config.put( AvailableSettings.XML_FILE_NAMES, dds );
 		}
 
-		if ( !Environment.getProperties().containsKey( Environment.CONNECTION_PROVIDER ) ) {
 		config.put( GlobalTemporaryTableStrategy.DROP_ID_TABLES, "true" );
 		config.put( LocalTemporaryTableStrategy.DROP_ID_TABLES, "true" );
+		if ( !Environment.getProperties().containsKey( Environment.CONNECTION_PROVIDER ) ) {
 			config.put(
 					org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER,
 					SharedDriverManagerConnectionProviderImpl.getInstance()
@@ -0,0 +1 @@
.sdkmanrc
@@ -0,0 +1,4 @@
# Enable auto-env through the sdkman_auto_env config
# Add key=value pairs of SDKs to use below
java=8.0.252.hs-adpt

@@ -7,7 +7,7 @@
 
 hibernate.dialect org.hibernate.spatial.dialect.oracle.OracleSpatial10gDialect
 hibernate.connection.driver_class oracle.jdbc.OracleDriver
-hibernate.connection.url jdbc:oracle:thin:@localhost:1521:ORCLCDB
+hibernate.connection.url jdbc:oracle:thin:@localhost:1521:ORCL
 hibernate.connection.username C##hibernate
 hibernate.connection.password hibernate
 
@@ -7,7 +7,7 @@
 
 hibernate.dialect org.hibernate.spatial.dialect.oracle.OracleSpatial10gDialect
 hibernate.connection.driver_class oracle.jdbc.OracleDriver
-hibernate.connection.url jdbc:oracle:thin:@localhost:1521:ORCLCDB
+hibernate.connection.url jdbc:oracle:thin:@localhost:1521:ORCL
 hibernate.connection.username C##hibernate
 hibernate.connection.password hibernate
 
@@ -7,7 +7,7 @@
 
 hibernate.dialect org.hibernate.spatial.dialect.oracle.OracleSpatialSDO10gDialect
 hibernate.connection.driver_class oracle.jdbc.OracleDriver
-hibernate.connection.url jdbc:oracle:thin:@localhost:1521:ORCLCDB
+hibernate.connection.url jdbc:oracle:thin:@localhost:1521:ORCL
 hibernate.connection.username C##hibernate
 hibernate.connection.password hibernate
 
@@ -4,4 +4,4 @@
  * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
  * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
  */
-jdbcDependency "postgresql:postgresql:8.4-701.jdbc4"
+jdbcDependency 'org.postgresql:postgresql:42.2.2'
@@ -5,16 +5,14 @@
 # See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 #
 
-hibernate.test.new_metadata_mappings = true
 hibernate.dialect org.hibernate.spatial.dialect.postgis.PostgisPG95Dialect
 hibernate.connection.driver_class org.postgresql.Driver
-hibernate.connection.url jdbc:postgresql://localhost:9432
+hibernate.connection.url jdbc:postgresql://localhost:9432/
 hibernate.connection.username hibern8
 hibernate.connection.password hibern8
 
-hibernate.connection.pool_size 5
 
 hibernate.show_sql true
 hibernate.format_sql true
 
@@ -1,23 +0,0 @@
#
# Hibernate, Relational Persistence for Idiomatic Java
#
# License: GNU Lesser General Public License (LGPL), version 2.1 or later.
# See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
#

hibernate.dialect org.hibernate.spatial.dialect.sqlserver.SqlServer2008SpatialDialect
hibernate.connection.driver_class com.microsoft.sqlserver.jdbc.SQLServerDriver
hibernate.connection.url jdbc:sqlserver://localhost:1433;databaseName=TestDb
hibernate.connection.username hibern8
hibernate.connection.password langpaswoord123A%1


hibernate.connection.pool_size 5

hibernate.show_sql true
hibernate.format_sql true

hibernate.max_fetch_depth 5

hibernate.cache.region_prefix hibernate.test
hibernate.cache.region.factory_class org.hibernate.testing.cache.CachingRegionFactory
@@ -1,11 +0,0 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
repositories {
	mavenLocal( )
}

jdbcDependency 'com.microsoft.sqlserver:mssql-jdbc:6.4.0.jre8'
@@ -7,10 +7,11 @@
 
 package org.hibernate.spatial;
 
+import java.util.Locale;
+
 import org.hibernate.type.descriptor.WrapperOptions;
 import org.hibernate.type.descriptor.java.AbstractTypeDescriptor;
 import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
-import org.hibernate.type.descriptor.java.JavaTypeDescriptorRegistry;
 
 import org.locationtech.jts.geom.Geometry;
 import org.locationtech.jts.io.ParseException;
@@ -49,7 +50,7 @@ public class JTSGeometryJavaTypeDescriptor extends AbstractTypeDescriptor<Geomet
 			return reader.read( string );
 		}
 		catch (ParseException e) {
-			throw new RuntimeException( String.format( "Can't parse string %s as WKT", string ) );
+			throw new RuntimeException( String.format( Locale.ENGLISH, "Can't parse string %s as WKT", string ) );
 		}
 	}
 
@@ -29,8 +29,6 @@ import org.hibernate.spatial.SpatialRelation;
 import org.hibernate.type.StandardBasicTypes;
 import org.hibernate.type.Type;
 
-import static java.lang.String.format;
-
 /**
  * @author David Adler, Adtech Geospatial
  * creation-date: 5/22/2014
@@ -228,24 +226,11 @@ public class DB2SpatialDialect extends DB2Dialect implements SpatialDialect {
 		) );
 
 		// Register non-SFS functions listed in Hibernate Spatial
-		registerFunction( "dwithin", new DWithinFunction());
-
-//		// The srid parameter needs to be explicitly cast to INTEGER to avoid a -245 SQLCODE,
-//		// ambiguous parameter.
-//		registerFunction( "transform", new SQLFunctionTemplate(
-//				geolatteGemetryType,
-//				"DB2GSE.ST_Transform(?1, CAST (?2 AS INTEGER))"
-//		) );
+		registerFunction( "dwithin", new DWithinFunction() );
 
 		registerFunction( "geomFromText", new StandardSQLFunction(
 				"DB2GSE.ST_GeomFromText"
 		) );
-
-//		// Register spatial aggregate function
-//		registerFunction( "extent", new SQLFunctionTemplate(
-//				geolatteGemetryType,
-//				"db2gse.ST_GetAggrResult(MAX(db2gse.st_BuildMBRAggr(?1)))"
-//		) );
 	}
 
 	@Override
@@ -271,7 +256,7 @@ public class DB2SpatialDialect extends DB2Dialect implements SpatialDialect {
 	@Override
 	public String getSpatialAggregateSQL(String columnName, int type) {
 		switch ( type ) {
-			case SpatialAggregate.EXTENT: // same as extent function above???
+			case SpatialAggregate.EXTENT:
 				return "db2gse.ST_GetAggrResult(MAX(db2gse.st_BuildMBRAggr(" + columnName + ")))";
 			case SpatialAggregate.UNION:
 				return "db2gse.ST_GetAggrResult(MAX(db2gse.st_BuildUnionAggr(" + columnName + ")))";
@@ -304,7 +289,8 @@ public class DB2SpatialDialect extends DB2Dialect implements SpatialDialect {
 		if ( spatialRelation != SpatialRelation.DISJOINT ) {
 			return " db2gse." + relationName + "(" + columnName + ", ?) = 1 SELECTIVITY .0001";
 		}
-		else { // SELECTIVITY not supported for ST_Disjoint UDF
+		else {
+			// SELECTIVITY not supported for ST_Disjoint UDF
 			return " db2gse." + relationName + "(" + columnName + ", ?) = 1";
 		}
 	}
@@ -328,17 +314,17 @@ public class DB2SpatialDialect extends DB2Dialect implements SpatialDialect {
 	private static class DWithinFunction extends StandardSQLFunction {
 
 		public DWithinFunction() {
-			super( "db2gse.ST_Dwithin" , StandardBasicTypes.NUMERIC_BOOLEAN);
+			super( "db2gse.ST_Dwithin", StandardBasicTypes.NUMERIC_BOOLEAN );
 		}
 
 		public String render(Type firstArgumentType, final List args, final SessionFactoryImplementor factory) {
 			StringBuilder sb = new StringBuilder( "db2gse.ST_Intersects( " );
-			sb.append( (String)args.get(0) ) //
-					.append(", db2gse.ST_Buffer(")
-					.append((String)args.get(1) )
-					.append(", ")
-					.append((String)args.get(2) )
-					.append(", 'METER'))");
+			sb.append( (String) args.get( 0 ) )
+					.append( ", db2gse.ST_Buffer(" )
+					.append( (String) args.get( 1 ) )
+					.append( ", " )
+					.append( (String) args.get( 2 ) )
+					.append( ", 'METER'))" );
 			return sb.toString();
 		}
 
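For orientation, a hedged usage sketch of the aggregate rendering touched above. The column name "geom" and the wrapper class are illustrative, and the dialect and the SpatialAggregate constants are assumed to live in their usual org.hibernate.spatial packages; this is not part of the patch.

import org.hibernate.spatial.SpatialAggregate;
import org.hibernate.spatial.dialect.db2.DB2SpatialDialect;

public class Db2ExtentSqlSketch {
	public static void main(String[] args) {
		// "geom" is just an example column name
		String sql = new DB2SpatialDialect().getSpatialAggregateSQL( "geom", SpatialAggregate.EXTENT );
		// expected, per the EXTENT branch above:
		// db2gse.ST_GetAggrResult(MAX(db2gse.st_BuildMBRAggr(geom)))
		System.out.println( sql );
	}
}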
@@ -8,6 +8,7 @@
 package org.hibernate.spatial.dialect.h2geodb;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
@@ -177,7 +178,7 @@ public class GeoDBDialect extends H2Dialect implements SpatialDialect {
 				Type firstArgumentType, List arguments, SessionFactoryImplementor sessionFactory) {
 			int argumentCount = arguments.size();
 			if ( argumentCount != 2 ) {
-				throw new QueryException( String.format( "2 arguments expected, received %d", argumentCount ) );
+				throw new QueryException( String.format( Locale.ENGLISH, "2 arguments expected, received %d", argumentCount ) );
 			}
 
 			return Stream.of(
@@ -11,6 +11,7 @@ import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.Locale;
 
 import org.geolatte.geom.ByteBuffer;
 import org.geolatte.geom.ByteOrder;
@@ -23,6 +24,10 @@ public class HANASpatialUtils {
 
 	private static final int POSTGIS_SRID_FLAG = 0x20000000;
 
+	private HANASpatialUtils(){
+		//prevent instantiation of Utility class
+	}
+
 	@SuppressWarnings("resource")
 	public static Geometry<?> toGeometry(ResultSet rs, String name) throws SQLException {
 		ByteBuffer buffer = toByteBuffer( rs.getObject( name ) );
@@ -36,7 +41,7 @@ public class HANASpatialUtils {
 		String columnName = null;
 		for ( int i = 1; i <= rs.getMetaData().getColumnCount(); i++ ) {
 			if ( name.equals( rs.getMetaData().getColumnLabel( i ) ) ||
-					name.toUpperCase().equals( rs.getMetaData().getColumnLabel( i ) ) ) {
+					name.toUpperCase( Locale.ENGLISH ).equals( rs.getMetaData().getColumnLabel( i ) ) ) {
 				tableName = rs.getMetaData().getTableName( i );
 				columnName = rs.getMetaData().getColumnName( i );
 			}
@@ -80,17 +85,20 @@ public class HANASpatialUtils {
 	}
 
 	private static ByteBuffer addCrsId(byte[] wkb, byte orderByte, int typeCode, int crsId) {
-		ByteBuffer buffer = ByteBuffer.allocate( wkb.length + 4 ); // original capacity + 4 bytes for the CRS ID
+		// original capacity + 4 bytes for the CRS ID
+		ByteBuffer buffer = ByteBuffer.allocate( wkb.length + 4 );
 		buffer.setByteOrder( ByteOrder.valueOf( orderByte ) );
 
-		buffer.put( orderByte ); // write byte order
+		// write byte order
+		buffer.put( orderByte );
 
-		buffer.putUInt( typeCode | POSTGIS_SRID_FLAG ); // set SRID flag
+		// set SRID flag
+		buffer.putUInt( typeCode | POSTGIS_SRID_FLAG );
 
-		buffer.putInt( crsId ); // write CRS ID
+		// write CRS ID
+		buffer.putInt( crsId );
 
 		// write remaining data
 		for ( int i = 5; i < wkb.length; i++ ) {
 			buffer.put( wkb[i] );
 		}
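The recurring Locale.ENGLISH additions in these spatial classes all guard against locale-sensitive formatting and case conversion. A small, self-contained illustration of the failure mode (the strings and class name are illustrative, not from the patch): under a Turkish default locale, a locale-less toUpperCase() would not produce the ASCII column label the HANA code above is trying to match.

import java.util.Locale;

public class LocaleCasingExample {
	public static void main(String[] args) {
		// In Turkish, "i" upper-cases to the dotted capital "İ" (U+0130),
		// so a locale-less toUpperCase() can fail to match an ASCII label.
		System.out.println( "id".toUpperCase( new Locale( "tr", "TR" ) ) ); // İD
		System.out.println( "id".toUpperCase( Locale.ENGLISH ) );           // ID
	}
}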
@@ -6,6 +6,7 @@
 */
 package org.hibernate.spatial.dialect.mysql;
 
+import java.util.Locale;
 import java.util.Map;
 
 import org.hibernate.boot.model.TypeContributions;
@@ -93,7 +94,10 @@ public class MySQLSpatialDialect extends MySQLDialect implements SpatialDialect
 
 	@Override
 	public String getDWithinSQL(String columnName) {
-		throw new UnsupportedOperationException( String.format( "Mysql doesn't support the Dwithin function" ) );
+		throw new UnsupportedOperationException( String.format(
+				Locale.ENGLISH,
+				"Mysql doesn't support the Dwithin function"
+		) );
 	}
 
 	@Override
@@ -42,11 +42,10 @@ class OracleSDOSupport implements SpatialDialect, Serializable, WithCustomJPAFil
 			OracleSpatial10gDialect.class.getName()
 	);
 
-	private final boolean isOgcStrict;
 	private final SpatialFunctionsRegistry sdoFunctions;
 
 	OracleSDOSupport(boolean isOgcStrict) {
-		this.isOgcStrict = isOgcStrict;
 		this.sdoFunctions = new OracleSpatialFunctions( isOgcStrict, this );
 	}
 
@@ -262,7 +261,7 @@ class OracleSDOSupport implements SpatialDialect, Serializable, WithCustomJPAFil
 			aggregateFunction.append( "SDOAGGRTYPE(" );
 		}
 		aggregateFunction.append( columnName );
-		// TODO tolerance must by configurable
+		// Can we make tolerance configurable
 		if ( sa.isAggregateType() ) {
 			aggregateFunction.append( ", " ).append( .001 ).append( ")" );
 		}
@@ -292,7 +291,7 @@ class OracleSDOSupport implements SpatialDialect, Serializable, WithCustomJPAFil
 	 */
 	@Override
 	public String getHavingSridSQL(String columnName) {
-		return String.format( " (MDSYS.ST_GEOMETRY(%s).ST_SRID() = ?)", columnName , Locale.US);
+		return String.format( Locale.ENGLISH, " (MDSYS.ST_GEOMETRY(%s).ST_SRID() = ?)", columnName );
 	}
 
 	/**
@@ -306,7 +305,12 @@ class OracleSDOSupport implements SpatialDialect, Serializable, WithCustomJPAFil
 	 */
 	@Override
 	public String getIsEmptySQL(String columnName, boolean isEmpty) {
-		return String.format( "( MDSYS.ST_GEOMETRY(%s).ST_ISEMPTY() = %d )", columnName, isEmpty ? 1 : 0 , Locale.US);
+		return String.format(
+				Locale.ENGLISH,
+				"( MDSYS.ST_GEOMETRY(%s).ST_ISEMPTY() = %d )",
+				columnName,
+				isEmpty ? 1 : 0
+		);
 	}
 
 	/**
@@ -7,6 +7,7 @@
 package org.hibernate.spatial.dialect.postgis;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
@@ -226,7 +227,11 @@ class PostgisFunctions extends SpatialFunctionsRegistry {
             Type firstArgumentType, List arguments, SessionFactoryImplementor sessionFactory) {
         int argumentCount = arguments.size();
         if ( argumentCount != 2 ) {
-            throw new QueryException( String.format( "2 arguments expected, received %d", argumentCount ) );
+            throw new QueryException( String.format(
+                    Locale.ENGLISH,
+                    "2 arguments expected, received %d",
+                    argumentCount
+            ) );
         }
 
         return Stream.of(
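The String.format calls in this file and in the Oracle support class above now pass Locale.ENGLISH so that the rendered message does not depend on the JVM's default locale. A minimal, self-contained illustration of why the locale matters (not part of the patch):

    import java.util.Locale;

    public class LocaleFormatSketch {
        public static void main(String[] args) {
            int argumentCount = 1234567;
            // Grouping separators differ per locale: "1,234,567" vs "1.234.567".
            System.out.println( String.format( Locale.ENGLISH, "%,d arguments", argumentCount ) );
            System.out.println( String.format( Locale.GERMANY, "%,d arguments", argumentCount ) );
        }
    }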
@@ -1,17 +1,28 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+
 package org.hibernate.spatial.dialect.hana;
 
 import static java.lang.String.format;
 
 import java.sql.SQLException;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 import org.hibernate.cfg.Configuration;
 import org.hibernate.spatial.HSMessageLogger;
 import org.hibernate.spatial.integration.TestSpatialFunctions;
 import org.hibernate.spatial.testing.dialects.hana.HANAExpectationsFactory;
 
 import org.hibernate.testing.RequiresDialect;
 
 import org.jboss.logging.Logger;
 
 import org.junit.Test;
 
 import org.locationtech.jts.geom.Geometry;
@@ -24,7 +35,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     private static final HSMessageLogger LOG = Logger.getMessageLogger(
             HSMessageLogger.class,
-            TestHANASpatialFunctions.class.getName() );
+            TestHANASpatialFunctions.class.getName()
+    );
 
     protected HANAExpectationsFactory hanaExpectationsFactory;
 
@@ -52,8 +64,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
     public void alphashape(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getAlphaShape( 1 );
         String hql = format(
+                Locale.ENGLISH,
                 "SELECT id, alphashape(geom, 1) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Point', 'ST_MultiPoint')",
-                pckg );
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -71,7 +85,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getArea();
         String hql = format(
                 "SELECT id, area(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon')",
-                pckg );
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -151,7 +166,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void assvgaggr(String pckg) throws SQLException {
         Map<Integer, String> dbexpected = hanaExpectationsFactory.getAsSVGAggr();
-        String hql = format( "SELECT cast(count(g) as int), assvgaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g", pckg );
+        String hql = format(
+                "SELECT cast(count(g) as int), assvgaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -199,7 +217,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void convexhullaggr(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getConvexHullAggr();
-        String hql = format( "SELECT cast(count(g) as int), convexhullaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g", pckg );
+        String hql = format(
+                "SELECT cast(count(g) as int), convexhullaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -215,7 +236,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void centroid(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getCentroid();
-        String hql = format( "SELECT id, centroid(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_Polygon'", pckg );
+        String hql = format(
+                "SELECT id, centroid(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_Polygon'",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -249,7 +273,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getCoveredBy( expectationsFactory.getTestPolygon() );
         String hql = format(
                 "SELECT id, coveredby(geom, :filter) FROM org.hibernate.spatial.integration.%s.GeomEntity where coveredby(geom, :filter) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -268,7 +294,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getCovers( expectationsFactory.getTestPolygon() );
         String hql = format(
                 "SELECT id, covers(geom, :filter) FROM org.hibernate.spatial.integration.%s.GeomEntity where covers(geom, :filter) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -285,8 +313,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void endpoint(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getEndPoint();
-        String hql = format( "SELECT id, endpoint(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_LineString'",
-                pckg );
+        String hql = format(
+                "SELECT id, endpoint(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_LineString'",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -302,7 +332,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void envelopeaggr(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getEnvelopeAggr();
-        String hql = format( "SELECT cast(count(g) as int), envelopeaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g", pckg );
+        String hql = format(
+                "SELECT cast(count(g) as int), envelopeaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -318,8 +351,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void exteriorring(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getExteriorRing();
-        String hql = format( "SELECT id, exteriorring(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_Polygon'",
-                pckg );
+        String hql = format(
+                "SELECT id, exteriorring(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_Polygon'",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -337,8 +372,11 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         WKBWriter writer = new WKBWriter( 2, true );
         byte[] ewkb = writer.write( expectationsFactory.getTestPolygon() );
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromEWKB( ewkb );
-        String hql = format( "SELECT 1, cast(geomfromewkb(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
-                getGeometryTypeFromPackage( pckg ), pckg );
+        String hql = format(
+                "SELECT 1, cast(geomfromewkb(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                getGeometryTypeFromPackage( pckg ),
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "param", ewkb );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -357,8 +395,11 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         WKTWriter writer = new WKTWriter();
         String ewkt = "SRID=" + expectationsFactory.getTestSrid() + ";" + writer.write( expectationsFactory.getTestPolygon() );
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromEWKT( ewkt );
-        String hql = format( "SELECT 1, cast(geomfromewkt(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
-                getGeometryTypeFromPackage( pckg ), pckg );
+        String hql = format(
+                "SELECT 1, cast(geomfromewkt(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                getGeometryTypeFromPackage( pckg ),
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "param", ewkt );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -376,8 +417,11 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
     public void geomfromtext(String pckg) throws SQLException {
         String text = expectationsFactory.getTestPolygon().toText();
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromText( text );
-        String hql = format( "SELECT 1, cast(geomfromtext(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
-                getGeometryTypeFromPackage( pckg ), pckg );
+        String hql = format(
+                "SELECT 1, cast(geomfromtext(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                getGeometryTypeFromPackage( pckg ),
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "param", text );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -396,8 +440,11 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         WKBWriter writer = new WKBWriter( 2, false );
         byte[] wkb = writer.write( expectationsFactory.getTestPolygon() );
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromWKB( wkb );
-        String hql = format( "SELECT 1, cast(geomfromwkb(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
-                getGeometryTypeFromPackage( pckg ), pckg );
+        String hql = format(
+                "SELECT 1, cast(geomfromwkb(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                getGeometryTypeFromPackage( pckg ),
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "param", wkb );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -416,8 +463,11 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         WKTWriter writer = new WKTWriter();
         String wkt = writer.write( expectationsFactory.getTestPolygon() );
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromWKT( wkt );
-        String hql = format( "SELECT 1, cast(geomfromwkt(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
-                getGeometryTypeFromPackage( pckg ), pckg );
+        String hql = format(
+                "SELECT 1, cast(geomfromwkt(:param) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                getGeometryTypeFromPackage( pckg ),
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "param", wkt );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -436,7 +486,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeometryN( 1 );
         String hql = format(
                 "SELECT id, cast(geometryn(geom, :n) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_GeometryCollection'",
-                getGeometryTypeFromPackage( pckg ), pckg );
+                getGeometryTypeFromPackage( pckg ),
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "n", 1 );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -455,7 +507,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getInteriorRingN( 1 );
         String hql = format(
                 "SELECT id, cast(interiorringn(geom, :n) as %s) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_Polygon'",
-                getGeometryTypeFromPackage( pckg ), pckg );
+                getGeometryTypeFromPackage( pckg ),
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "n", 1 );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -472,7 +526,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void intersectionaggr(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getIntersectionAggr();
-        String hql = format( "SELECT cast(count(g) as int), intersectionaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g", pckg );
+        String hql = format(
+                "SELECT cast(count(g) as int), intersectionaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -487,11 +544,15 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
     }
 
     public void intersectsrect(String pckg) throws SQLException {
-        Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIntersectsRect( (Point) expectationsFactory.getTestPoint().reverse(),
-                expectationsFactory.getTestPoint() );
+        Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIntersectsRect(
+                (Point) expectationsFactory.getTestPoint().reverse(),
+                expectationsFactory.getTestPoint()
+        );
         String hql = format(
                 "SELECT id, intersectsrect(geom, :pmin, :pmax) FROM org.hibernate.spatial.integration.%s.GeomEntity where intersectsrect(geom, :pmin, :pmax) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         Map<String, Object> params = createQueryParams( "pmin", expectationsFactory.getTestPoint().reverse() );
         params.put( "pmax", expectationsFactory.getTestPoint() );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
@@ -511,7 +572,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIs3D();
         String hql = format(
                 "SELECT id, is3d(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where is3d(geom) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -529,7 +592,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsClosed();
         String hql = format(
                 "SELECT id, isclosed(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_LineString', 'ST_MultiLineString') and isclosed(geom) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -547,7 +612,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsMeasured();
         String hql = format(
                 "SELECT id, ismeasured(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where ismeasured(geom) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -565,7 +632,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsRing();
         String hql = format(
                 "SELECT id, isring(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -583,7 +652,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsValid();
         String hql = format(
                 "SELECT id, isvalid(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where isvalid(geom) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -601,7 +672,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getLength();
         String hql = format(
                 "SELECT id, length(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_LineString', 'ST_MultiLineString') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -619,7 +692,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getM();
         String hql = format(
                 "SELECT id, m(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -637,7 +712,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getMMax();
         String hql = format(
                 "SELECT id, mmax(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -655,7 +731,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getMMin();
         String hql = format(
                 "SELECT id, mmin(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -673,7 +750,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumGeometries();
         String hql = format(
                 "SELECT id, numgeometries(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_GeometryCollection') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -691,7 +770,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumInteriorRing();
         String hql = format(
                 "SELECT id, numinteriorring(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Polygon') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -709,7 +790,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumInteriorRings();
         String hql = format(
                 "SELECT id, numinteriorrings(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Polygon') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -727,7 +810,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumPoints();
         String hql = format(
                 "SELECT id, numpoints(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -745,7 +830,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getOrderingEquals( expectationsFactory.getTestPolygon() );
         String hql = format(
                 "SELECT id, orderingequals(geom, :filter) FROM org.hibernate.spatial.integration.%s.GeomEntity where orderingequals(geom, :filter) = true and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
@@ -764,7 +851,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getPerimeter();
         String hql = format(
                 "SELECT id, perimeter(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -782,7 +871,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getPointOnSurface();
         String hql = format(
                 "SELECT id, pointonsurface(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -800,17 +891,21 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getPointN( 1 );
         String hql = format(
                 "SELECT id, pointn(geom, :n) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         Map<String, Object> params = createQueryParams( "n", 1 );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
 
-    @Test(expected = SQLException.class) // ST_GEOMETRY columns are not supported
+    // ST_GEOMETRY columns are not supported
+    @Test(expected = SQLException.class)
     public void test_snaptogrid_on_jts() throws SQLException {
         snaptogrid( JTS );
     }
 
-    @Test(expected = SQLException.class) // ST_GEOMETRY columns are not supported
+    // ST_GEOMETRY columns are not supported
+    @Test(expected = SQLException.class)
     public void test_snaptogrid_on_geolatte() throws SQLException {
         snaptogrid( GEOLATTE );
     }
@@ -819,7 +914,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getSnapToGrid();
         String hql = format(
                 "SELECT id, snaptogrid(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -835,8 +931,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void startpoint(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getStartPoint();
-        String hql = format( "SELECT id, startpoint(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_LineString'",
-                pckg );
+        String hql = format(
+                "SELECT id, startpoint(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g where geometrytype(geom) = 'ST_LineString'",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -852,7 +950,10 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
 
     public void unionaggr(String pckg) throws SQLException {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getUnionAggr();
-        String hql = format( "SELECT cast(count(g) as int), unionaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g", pckg );
+        String hql = format(
+                "SELECT cast(count(g) as int), unionaggr(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity g",
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -870,7 +971,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getX();
         String hql = format(
                 "SELECT id, x(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -888,7 +991,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getXMax();
         String hql = format(
                 "SELECT id, xmax(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -906,7 +1010,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getXMin();
         String hql = format(
                 "SELECT id, xmin(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -924,7 +1029,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getY();
         String hql = format(
                 "SELECT id, y(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -942,7 +1049,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getYMax();
         String hql = format(
                 "SELECT id, ymax(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -960,7 +1068,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getYMin();
         String hql = format(
                 "SELECT id, ymin(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -978,7 +1087,9 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZ();
         String hql = format(
                 "SELECT id, z(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg,
+                expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -996,7 +1107,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZMax();
         String hql = format(
                 "SELECT id, zmax(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -1014,7 +1126,8 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZMin();
         String hql = format(
                 "SELECT id, zmin(geom) FROM org.hibernate.spatial.integration.%s.GeomEntity where srid(geom) = %d",
-                pckg, expectationsFactory.getTestSrid() );
+                pckg, expectationsFactory.getTestSrid()
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
     }
 
@@ -1032,14 +1145,16 @@ public class TestHANASpatialFunctions extends TestSpatialFunctions {
         Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getNestedFunctionInner( expectationsFactory.getTestPolygon() );
         String hql = format(
                 "SELECT id, geom FROM org.hibernate.spatial.integration.%s.GeomEntity g where dwithin(geom, srid(:filter, 0), 1) = true",
-                pckg );
+                pckg
+        );
         Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
 
         dbexpected = hanaExpectationsFactory.getNestedFunctionOuter( expectationsFactory.getTestPolygon() );
         hql = format(
                 "SELECT id, geom FROM org.hibernate.spatial.integration.%s.GeomEntity g where dwithin(:filter, srid(geom, 0), 1) = true",
-                pckg );
+                pckg
+        );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
     }
 
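The two snaptogrid tests above keep the JUnit 4 `expected` attribute; only the trailing comment moves onto its own line. For reference, `@Test(expected = ...)` makes a test pass only when the named exception escapes the test method, as in this reduced sketch (not part of the patch):

    import java.sql.SQLException;
    import org.junit.Test;

    public class ExpectedExceptionSketch {

        @Test(expected = SQLException.class)
        public void passesOnlyWhenSqlExceptionIsThrown() throws SQLException {
            // Without this throw, JUnit would report the test as failed.
            throw new SQLException( "ST_GEOMETRY columns are not supported" );
        }
    }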
@@ -32,9 +32,9 @@ import static org.junit.Assert.assertEquals;
  */
 public class PostgisUnmarshalTest {
 
-    private CoordinateReferenceSystem<G2D> crs = CoordinateReferenceSystems.WGS84;
-    private Geometry<G2D> geom = linestring( crs, g( 6.123, 53.234 ), g( 6.133, 53.244 ) );
-    private Geometry<C2D> geomNoSrid = linestring(
+    private final CoordinateReferenceSystem<G2D> crs = CoordinateReferenceSystems.WGS84;
+    private final Geometry<G2D> geom = linestring( crs, g( 6.123, 53.234 ), g( 6.133, 53.244 ) );
+    private final Geometry<C2D> geomNoSrid = linestring(
             CoordinateReferenceSystems.PROJECTED_2D_METER,
             c( 6.123, 53.234 ),
             c( 6.133, 53.244 )
@@ -1,3 +1,10 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+
 package org.hibernate.spatial.integration;
 
 import java.util.HashMap;
@@ -65,7 +72,7 @@ public abstract class AbstractTestStoreRetrieve<G, E extends GeomEntityLike<G>>
             tx = session.beginTransaction();
             for ( E storedEntity : stored.values() ) {
                 id = storedEntity.getId();
-                E retrievedEntity = (E) session.get( getGeomEntityClass(), id );
+                E retrievedEntity = session.get( getGeomEntityClass(), id );
                 G retrievedGeometry = retrievedEntity.getGeom();
                 G storedGeometry = storedEntity.getGeom();
                 String msg = createFailureMessage( storedEntity.getId(), storedGeometry, retrievedGeometry );
@@ -109,7 +116,6 @@ public abstract class AbstractTestStoreRetrieve<G, E extends GeomEntityLike<G>>
             for ( TestDataElement element : testData ) {
                 id = element.id;
                 tx = session.beginTransaction();
-                ;
                 E entity = createFrom( element, dialect );
                 stored.put( entity.getId(), entity );
                 session.save( entity );
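The cast removed above is redundant because Session.get(Class<T>, id) is generic and already returns the requested type; the dropped lone ';' was an empty statement. A reduced sketch of the typed lookup, with illustrative class and method names that are not taken from the patch:

    import org.hibernate.Session;

    class TypedGetSketch<E> {
        private final Class<E> entityClass;

        TypedGetSketch(Class<E> entityClass) {
            this.entityClass = entityClass;
        }

        E load(Session session, Integer id) {
            // No unchecked cast needed: get(Class<T>, Serializable) returns T.
            return session.get( entityClass, id );
        }
    }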
@@ -1,3 +1,10 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+
 package org.hibernate.spatial.integration;
 
 import org.hibernate.dialect.AbstractHANADialect;
@@ -1,3 +1,10 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+
 package org.hibernate.spatial.integration;
 
 /**
@@ -41,7 +41,7 @@ import static org.junit.Assert.fail;
 @SkipForDialect(value = HANASpatialDialect.class, comment = "The HANA dialect is tested via org.hibernate.spatial.dialect.hana.TestHANASpatialFunctions", jiraKey = "HHH-12426")
 public class TestGeolatteSpatialPredicates extends SpatialFunctionalTestCase {
 
-    private static HSMessageLogger LOG = Logger.getMessageLogger(
+    private static final HSMessageLogger LOG = Logger.getMessageLogger(
             HSMessageLogger.class,
             TestGeolatteSpatialPredicates.class.getName()
     );
@@ -41,7 +41,7 @@ import static org.junit.Assert.fail;
 @SkipForDialect(value = HANASpatialDialect.class, comment = "The HANA dialect is tested via org.hibernate.spatial.dialect.hana.TestHANASpatialFunctions", jiraKey = "HHH-12426")
 public class TestJTSSpatialPredicates extends SpatialFunctionalTestCase {
 
-    private static HSMessageLogger LOG = Logger.getMessageLogger(
+    private static final HSMessageLogger LOG = Logger.getMessageLogger(
             HSMessageLogger.class,
             TestJTSSpatialPredicates.class.getName()
     );
@@ -473,7 +473,9 @@ public class TestSpatialFunctions extends SpatialFunctionalTestCase {
         }
         Map<Integer, Double> dbexpected = expectationsFactory.getDistance( expectationsFactory.getTestPolygon() );
         String hql = format(
-                "SELECT id, distance(geom, :filter) from %s where srid(geom) = %d", entityName( pckg ), expectationsFactory.getTestSrid()
+                "SELECT id, distance(geom, :filter) from %s where srid(geom) = %d",
+                entityName( pckg ),
+                expectationsFactory.getTestSrid()
         );
         Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
         retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
@ -9,6 +9,7 @@ package org.hibernate.spatial.integration;
|
||||||
|
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Locale;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import org.hibernate.Session;
|
import org.hibernate.Session;
|
||||||
|
@ -35,7 +36,7 @@ import static org.junit.Assert.fail;
|
||||||
@SkipForDialect(value = HANASpatialDialect.class, comment = "The HANA dialect is tested via org.hibernate.spatial.dialect.hana.TestHANASpatialFunctions", jiraKey = "HHH-12426")
|
@SkipForDialect(value = HANASpatialDialect.class, comment = "The HANA dialect is tested via org.hibernate.spatial.dialect.hana.TestHANASpatialFunctions", jiraKey = "HHH-12426")
|
||||||
public class TestSpatialRestrictions extends SpatialFunctionalTestCase {
|
public class TestSpatialRestrictions extends SpatialFunctionalTestCase {
|
||||||
|
|
||||||
private static HSMessageLogger LOG = Logger.getMessageLogger(
|
private static final HSMessageLogger LOG = Logger.getMessageLogger(
|
||||||
HSMessageLogger.class,
|
HSMessageLogger.class,
|
||||||
TestSpatialRestrictions.class.getName()
|
TestSpatialRestrictions.class.getName()
|
||||||
);
|
);
|
||||||
|
@ -207,7 +208,11 @@ public class TestSpatialRestrictions extends SpatialFunctionalTestCase {
|
||||||
if ( entry.getValue() ) {
|
if ( entry.getValue() ) {
|
||||||
cnt++;
|
cnt++;
|
||||||
if ( !findInList( entry.getKey(), (List<JtsGeomEntity>) list ) ) {
|
if ( !findInList( entry.getKey(), (List<JtsGeomEntity>) list ) ) {
|
||||||
fail( String.format( "Expected object with id= %d, but not found in result", entry.getKey() ) );
|
fail( String.format(
|
||||||
|
Locale.ENGLISH,
|
||||||
|
"Expected object with id= %d, but not found in result",
|
||||||
|
entry.getKey()
|
||||||
|
) );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
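Note: the fail(...) change above threads Locale.ENGLISH into String.format. For the %d placeholder used there the effect is small, but pinning the locale is the usual defensive pattern because Formatter output is locale-sensitive. The sketch below is illustrative only (made-up values, %f instead of %d) and shows the kind of difference it prevents.

import java.util.Locale;

// Illustrative only: locale-sensitive vs. pinned-locale formatting.
public class LocaleFormatExample {
    public static void main(String[] args) {
        double value = 12345.678;
        // Depends on the JVM default locale: may print "12345,678" under e.g. Locale.GERMANY.
        System.out.println( String.format( "default locale: %.3f", value ) );
        // Always prints "12345.678", independent of where the tests run.
        System.out.println( String.format( Locale.ENGLISH, "pinned locale:  %.3f", value ) );
    }
}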
@@ -88,11 +88,7 @@ public class GeomEntity implements GeomEntityLike<Geometry> {
 
         GeomEntity geomEntity = (GeomEntity) o;
 
-        if ( !id.equals( geomEntity.id ) ) {
-            return false;
-        }
-
-        return true;
+        return id.equals( geomEntity.id );
     }
 
     @Override
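Note: the equals() change above is the usual collapse of an if/return false/return true ladder into a single boolean expression. A self-contained sketch of the same shape follows; the class is hypothetical, and it uses Objects.equals for null-safety where the original relies on a non-null id.

import java.util.Objects;

// Hypothetical stand-in for GeomEntity/JtsGeomEntity: only the identity-relevant field is shown.
public class IdOnlyEntity {
    private final Integer id;

    public IdOnlyEntity(Integer id) {
        this.id = id;
    }

    @Override
    public boolean equals(Object o) {
        if ( this == o ) {
            return true;
        }
        if ( o == null || getClass() != o.getClass() ) {
            return false;
        }
        IdOnlyEntity other = (IdOnlyEntity) o;
        // Same shape as the refactor above: the if/return ladder collapses into one expression.
        return Objects.equals( id, other.id );
    }

    @Override
    public int hashCode() {
        return Objects.hashCode( id );
    }
}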
@@ -28,7 +28,7 @@ import org.geolatte.geom.codec.WktDecodeException;
 @Skip(condition = SpatialDialectMatcher.class, message = "No Spatial Dialect")
 public class TestStoreRetrieveUsingGeolatte extends AbstractTestStoreRetrieve<Geometry, GeomEntity> {
 
-    private static HSMessageLogger LOG = Logger.getMessageLogger(
+    private static final HSMessageLogger LOG = Logger.getMessageLogger(
             HSMessageLogger.class,
             TestStoreRetrieveUsingGeolatte.class.getName()
     );
@@ -82,11 +82,7 @@ public class JtsGeomEntity implements GeomEntityLike<Geometry> {
 
         JtsGeomEntity geomEntity = (JtsGeomEntity) o;
 
-        if ( !id.equals( geomEntity.id ) ) {
-            return false;
-        }
-
-        return true;
+        return id.equals( geomEntity.id );
     }
 
     @Override
@@ -793,8 +793,7 @@ public abstract class AbstractExpectationsFactory {
                 case STRING:
                     expected.put( id, (T) results.getString( 2 ) );
                     break;
-                case INTEGER:
-                {
+                case INTEGER: {
                     Long value = Long.valueOf( results.getLong( 2 ) );
                     if ( results.wasNull() ) {
                         value = null; // This is required because the Hibernate BasicExtractor also checks ResultSet#wasNull which can lead to a mismatch between the expected and the actual results
@@ -802,8 +801,7 @@ public abstract class AbstractExpectationsFactory {
                     expected.put( id, (T) value );
                 }
                 break;
-                case DOUBLE:
-                {
+                case DOUBLE: {
                     Double value = Double.valueOf( results.getDouble( 2 ) );
                     if ( results.wasNull() ) {
                         value = null; //this is required because SQL Server converts automatically null to 0.0
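Note: the case INTEGER / case DOUBLE blocks above hinge on ResultSet#wasNull: getLong and getDouble return 0 for SQL NULL, so the expected values must be mapped back to Java null to line up with what Hibernate's extractor produces. Below is a minimal JDBC sketch of that pattern; it assumes an in-memory H2 driver on the classpath, and the URL and query are made up for illustration.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Minimal sketch of the ResultSet#wasNull pattern used in the hunks above.
public class WasNullExample {
    public static void main(String[] args) throws SQLException {
        try ( Connection conn = DriverManager.getConnection( "jdbc:h2:mem:demo" );
              Statement stmt = conn.createStatement();
              ResultSet rs = stmt.executeQuery( "SELECT 1 AS id, CAST(NULL AS DOUBLE) AS dist" ) ) {
            while ( rs.next() ) {
                Double dist = rs.getDouble( "dist" ); // returns 0.0 for SQL NULL
                if ( rs.wasNull() ) {
                    dist = null; // distinguish "really 0.0" from "NULL in the database"
                }
                System.out.println( "id=" + rs.getInt( "id" ) + ", dist=" + dist );
            }
        }
    }
}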
@@ -14,6 +14,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.StringWriter;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -42,7 +43,7 @@ import org.geolatte.geom.codec.WktDecoder;
 public class DataSourceUtils {
 
 
-    private static HSMessageLogger LOG = Logger.getMessageLogger(
+    private static final HSMessageLogger LOG = Logger.getMessageLogger(
             HSMessageLogger.class,
             DataSourceUtils.class.getName()
     );
@@ -257,7 +258,7 @@ public class DataSourceUtils {
         BufferedReader reader = null;
         try {
             reader = new BufferedReader(
-                    new InputStreamReader( is, Charset.forName( "UTF-8" ) )
+                    new InputStreamReader( is, StandardCharsets.UTF_8 )
             );
 
             StringWriter sw = new StringWriter();
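Note: replacing Charset.forName( "UTF-8" ) with StandardCharsets.UTF_8 swaps a runtime string lookup for a compile-time constant. A small self-contained sketch of the same reader construction follows; the input bytes are made up.

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

// Minimal sketch of the Charset.forName("UTF-8") -> StandardCharsets.UTF_8 change above.
public class Utf8ReaderExample {
    public static void main(String[] args) throws IOException {
        InputStream is = new ByteArrayInputStream( "héllo".getBytes( StandardCharsets.UTF_8 ) );
        try ( BufferedReader reader = new BufferedReader(
                new InputStreamReader( is, StandardCharsets.UTF_8 ) ) ) {
            System.out.println( reader.readLine() ); // prints "héllo" regardless of platform charset
        }
    }
}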
@@ -1,7 +1,14 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+
 package org.hibernate.spatial.testing;
 
 import org.geolatte.geom.Geometry;
-import org.geolatte.geom.GeometryPointEquality;
+import org.geolatte.geom.GeometryPositionEquality;
 import org.geolatte.geom.Position;
 
 /**
@@ -12,7 +19,7 @@ public class GeolatteGeometryEquality<P extends Position> implements GeometryEqu
     private final org.geolatte.geom.GeometryEquality delegate;
 
     public GeolatteGeometryEquality() {
-        this( new GeometryPointEquality() );
+        this( new GeometryPositionEquality() );
     }
 
     public GeolatteGeometryEquality(org.geolatte.geom.GeometryEquality delegate) {
@@ -1,3 +1,10 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+
 package org.hibernate.spatial.testing;
 
 /**
@@ -29,7 +29,7 @@ public interface NativeSQLStatement {
     *
     * @throws SQLException
     */
-    public PreparedStatement prepare(Connection connection) throws SQLException;
+    PreparedStatement prepare(Connection connection) throws SQLException;
 
-    public String toString();
+    String toString();
 }
@@ -22,6 +22,6 @@ public interface SQLExpressionTemplate {
     *
     * @return an insert SQL for testDataElement
     */
-    public String toInsertSql(TestDataElement testDataElement);
+    String toInsertSql(TestDataElement testDataElement);
 
 }
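Note: the NativeSQLStatement and SQLExpressionTemplate hunks only drop the redundant public modifier; interface methods are implicitly public and abstract, so nothing changes semantically. A minimal sketch of the resulting style, with hypothetical names mirroring NativeSQLStatement:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Illustrative only: interface members need no explicit "public" modifier.
interface StatementFactory {
    PreparedStatement prepare(Connection connection) throws SQLException;

    // Implementations typically return the SQL text, e.g. for logging.
    String toString();
}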
@@ -75,7 +75,7 @@ public abstract class SpatialFunctionalTestCase extends BaseCoreFunctionalTestCa
         try {
             session = openSession();
             tx = session.beginTransaction();
-            String hql = String.format( "delete from %s", entityName(pckg) );
+            String hql = String.format( "delete from %s", entityName( pckg ) );
             Query q = session.createQuery( hql );
             q.executeUpdate();
             tx.commit();
@@ -238,7 +238,7 @@ public abstract class SpatialFunctionalTestCase extends BaseCoreFunctionalTestCa
                 "Failure on testsuite-suite for case " + id,
                 geometryEquality.test(
                         (Geometry) expected,
-                        (Geometry) org.geolatte.geom.jts.JTS.to( (org.geolatte.geom.Geometry) received )
+                        org.geolatte.geom.jts.JTS.to( (org.geolatte.geom.Geometry) received )
                 )
         );
     }
@@ -255,9 +255,10 @@ public abstract class SpatialFunctionalTestCase extends BaseCoreFunctionalTestCa
     }
 
     protected String entityName(String pckg) {
-        if (JTS.equalsIgnoreCase( pckg )) {
+        if ( JTS.equalsIgnoreCase( pckg ) ) {
             return "org.hibernate.spatial.integration.jts.JtsGeomEntity";
-        } else {
+        }
+        else {
             return "org.hibernate.spatial.integration.geolatte.GeomEntity";
         }
     }
@@ -26,8 +26,6 @@ public class TestData implements List<TestDataElement> {
     protected TestData() {
     }
 
-    ;
-
     public static TestData fromFile(String fileName) {
         TestDataReader reader = new TestDataReader();
         return fromFile( fileName, reader );
@@ -27,7 +27,7 @@ import org.hibernate.spatial.testing.dialects.sqlserver.SQLServerTestSupport;
 */
 public class TestSupportFactories {
 
-    private static TestSupportFactories instance = new TestSupportFactories();
+    private static final TestSupportFactories instance = new TestSupportFactories();
 
     private TestSupportFactories() {
     }
@@ -16,8 +16,8 @@ import org.geolatte.geom.Geometry;
 /**
  * @author Steve Ebersole
  */
-@Converter( autoApply = true )
-public class GeometryConverter implements AttributeConverter<Geometry,byte[]> {
+@Converter(autoApply = true)
+public class GeometryConverter implements AttributeConverter<Geometry, byte[]> {
     @Override
     public byte[] convertToDatabaseColumn(Geometry attribute) {
         if ( attribute == null ) {
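Note: GeometryConverter above is a JPA AttributeConverter registered with autoApply = true. The sketch below shows the same pattern on a simpler type pair; it is not Hibernate Spatial's converter, and it assumes the javax.persistence namespace used by this branch.

import javax.persistence.AttributeConverter;
import javax.persistence.Converter;

// Hypothetical converter with the same shape as GeometryConverter: autoApply = true means
// every mapped Boolean attribute goes through it without an explicit @Convert annotation.
@Converter(autoApply = true)
public class BooleanToYnConverter implements AttributeConverter<Boolean, String> {

    @Override
    public String convertToDatabaseColumn(Boolean attribute) {
        if ( attribute == null ) {
            return null;
        }
        return attribute ? "Y" : "N";
    }

    @Override
    public Boolean convertToEntityAttribute(String dbData) {
        if ( dbData == null ) {
            return null;
        }
        return "Y".equals( dbData );
    }
}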
@@ -36,17 +36,17 @@ public class GeometryConverterTest extends BaseUnitTestCase {
 
     @Test
     public void testConverterUsage() {
-        try ( final StandardServiceRegistry ssr = new StandardServiceRegistryBuilder()
+        try (final StandardServiceRegistry ssr = new StandardServiceRegistryBuilder()
                 .applySetting( AvailableSettings.DIALECT, GeoDBDialect.class )
                 .applySetting( AvailableSettings.HBM2DDL_AUTO, Action.CREATE_DROP )
-                .build() ) {
+                .build()) {
             final MetadataSources metadataSources = new MetadataSources( ssr )
                     .addAnnotatedClass( GeometryConverter.class )
                     .addAnnotatedClass( MyEntity.class );
             final MetadataBuilderImplementor metadataBuilder = (MetadataBuilderImplementor) metadataSources.getMetadataBuilder();
 
-            try ( final SessionFactoryImplementor sessionFactory =
-                    (SessionFactoryImplementor) metadataBuilder.build().buildSessionFactory() ) {
+            try (final SessionFactoryImplementor sessionFactory = (SessionFactoryImplementor) metadataBuilder.build()
+                    .buildSessionFactory()) {
 
                 final TypeConfiguration typeConfiguration = sessionFactory.getMetamodel().getTypeConfiguration();
 
@@ -17,7 +17,7 @@ import org.geolatte.geom.Geometry;
  * @author Steve Ebersole
  */
 @Entity
-@Table( name = "SP_CUST_TYPE_CONV_ENTITY")
+@Table(name = "SP_CUST_TYPE_CONV_ENTITY")
 public class MyEntity {
     private Integer id;
     private Geometry geometry;
@@ -1,24 +1,9 @@
 /*
- * This file is part of Hibernate Spatial, an extension to the
- * hibernate ORM solution for spatial (geographic) data.
+ * Hibernate, Relational Persistence for Idiomatic Java
 *
- * Copyright © 2014 Adtech Geospatial
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
 
 package org.hibernate.spatial.testing.dialects.db2;
 
 import java.sql.SQLException;
@@ -1,24 +1,9 @@
 /*
- * This file is part of Hibernate Spatial, an extension to the
- * hibernate ORM solution for spatial (geographic) data.
+ * Hibernate, Relational Persistence for Idiomatic Java
 *
- * Copyright © 2014 Adtech Geospatial
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
 
 package org.hibernate.spatial.testing.dialects.db2;
 
 import java.sql.SQLException;
@@ -1,24 +1,9 @@
 /*
- * This file is part of Hibernate Spatial, an extension to the
- * hibernate ORM solution for spatial (geographic) data.
+ * Hibernate, Relational Persistence for Idiomatic Java
 *
- * Copyright © 2014 Adtech Geospatial
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
 
 package org.hibernate.spatial.testing.dialects.db2;
 
 import org.hibernate.spatial.testing.SQLExpressionTemplate;
@@ -1,22 +1,8 @@
 /*
- * This file is part of Hibernate Spatial, an extension to the
- * hibernate ORM solution for spatial (geographic) data.
+ * Hibernate, Relational Persistence for Idiomatic Java
 *
- * Copyright © 2014 Adtech Geospatial
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
 
 package org.hibernate.spatial.testing.dialects.db2;
|
@ -35,8 +21,8 @@ import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
|
||||||
public class DB2TestSupport extends TestSupport {
|
public class DB2TestSupport extends TestSupport {
|
||||||
|
|
||||||
public TestData createTestData(BaseCoreFunctionalTestCase testcase) {
|
public TestData createTestData(BaseCoreFunctionalTestCase testcase) {
|
||||||
if ( "org.hibernate.spatial.integration.TestSpatialFunctions".equals( testcase.getClass()
|
if ( "org.hibernate.spatial.integration.TestSpatialFunctions".equals(
|
||||||
.getCanonicalName() ) ) {
|
testcase.getClass().getCanonicalName() ) ) {
|
||||||
return TestData.fromFile( "db2/test-db2nozm-only-polygon.xml" );
|
return TestData.fromFile( "db2/test-db2nozm-only-polygon.xml" );
|
||||||
}
|
}
|
||||||
return TestData.fromFile( "db2/test-db2nozm-data-set.xml" );
|
return TestData.fromFile( "db2/test-db2nozm-data-set.xml" );
|
||||||
|
|
|
@@ -44,7 +44,7 @@ public class GeoDBDataSourceUtils extends DataSourceUtils {
         String errorMsg = "Problem initializing GeoDB.";
         try {
             Class<?> geoDB = Thread.currentThread().getContextClassLoader().loadClass( "geodb.GeoDB" );
-            Method m = geoDB.getDeclaredMethod( "InitGeoDB", new Class[] { Connection.class } );
+            Method m = geoDB.getDeclaredMethod( "InitGeoDB", Connection.class );
             m.invoke( null, conn );
         }
         catch (ClassNotFoundException e) {
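Note: getDeclaredMethod takes a Class<?>... varargs parameter, so the explicit new Class[] { ... } wrapper removed above was redundant. A self-contained sketch of the equivalence, using a JDK class instead of geodb.GeoDB so it runs without extra jars:

import java.lang.reflect.Method;

// Minimal sketch of the varargs cleanup above.
public class VarargsReflectionExample {
    public static void main(String[] args) throws Exception {
        Class<?> target = Thread.currentThread().getContextClassLoader().loadClass( "java.lang.String" );
        // Equivalent calls: getDeclaredMethod(String, Class<?>...) is a varargs method.
        Method viaArray = target.getDeclaredMethod( "indexOf", new Class[] { String.class } );
        Method viaVarargs = target.getDeclaredMethod( "indexOf", String.class );
        System.out.println( viaArray.equals( viaVarargs ) ); // prints "true"
    }
}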