commit c007fd4ab5

@@ -86,7 +86,7 @@ task ciBuild {

 wrapper {
-	gradleVersion = '4.10.2'
+	gradleVersion = '4.10.3'
 	distributionType = Wrapper.DistributionType.ALL
 }

@@ -5,4 +5,4 @@
  * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
  */

-jdbcDependency 'com.sap.cloud.db.jdbc:ngdbc:2.2.16'
+jdbcDependency 'com.sap.cloud.db.jdbc:ngdbc:2.4.59'

@@ -4,4 +4,4 @@
  * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
  * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
  */
-jdbcDependency 'org.mariadb.jdbc:mariadb-java-client:1.5.7'
+jdbcDependency 'org.mariadb.jdbc:mariadb-java-client:2.2.4'

@@ -39,6 +39,14 @@ The `DataSource` `ConnectionProvider` also (optionally) accepts the `hibernate.c
 If specified, the https://docs.oracle.com/javase/8/docs/api/javax/sql/DataSource.html#getConnection-java.lang.String-java.lang.String-[`DataSource#getConnection(String username, String password)`] will be used.
 Otherwise, the no-arg form is used.

+[[database-connectionprovider-driver]]
+=== Driver Configuration
+`hibernate.connection.driver_class`:: The name of the JDBC Driver class to use
+`hibernate.connection.url`:: The JDBC connection url
+`hibernate.connection.*`:: All such setting names (except the <<appendices/Configurations.adoc#configurations-database-connection,predefined ones>>) will have the `hibernate.connection.` prefix stripped. The remaining name and the original value will be passed to the driver as a JDBC connection property
+
+NOTE: Not all properties apply to all situations. For example, if you are providing a data source, the `hibernate.connection.driver_class` setting will not be used.
+
 [[database-connectionprovider-c3p0]]
 === Using c3p0
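
A minimal usage sketch for the driver settings documented in the new section above. The property keys come from the documentation; the PostgreSQL driver class, the URL, and the "foo" property are invented illustrations of the pass-through rule, not part of the commit:

    import java.util.Properties;

    public final class DriverSettingsExample {
        public static Properties driverSettings() {
            Properties props = new Properties();
            props.setProperty( "hibernate.connection.driver_class", "org.postgresql.Driver" );
            props.setProperty( "hibernate.connection.url", "jdbc:postgresql://localhost/mydb" );
            // "foo" is not one of the predefined names, so the prefix is stripped
            // and foo=bar is handed to the driver as a plain JDBC connection property.
            props.setProperty( "hibernate.connection.foo", "bar" );
            return props;
        }
    }
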
@@ -48,13 +56,10 @@ To use the c3p0 integration, the application must include the `hibernate-c3p0` m
 ====

 Hibernate also provides support for applications to use http://www.mchange.com/projects/c3p0/[c3p0] connection pooling.
-When using this c3p0 support, a number of additional configuration settings are recognized.
+When c3p0 support is enabled, a number of c3p0-specific configuration settings are recognized in addition to the general ones described in <<database-connectionprovider-driver>>.

 Transaction isolation of the Connections is managed by the `ConnectionProvider` itself. See <<database-connectionprovider-isolation>>.

-`hibernate.connection.driver_class`:: The name of the JDBC Driver class to use
-`hibernate.connection.url`:: The JDBC connection url.
-Any settings prefixed with `hibernate.connection.` (other than the "special ones"):: These all have the `hibernate.connection.` prefix stripped and the rest will be passed as JDBC connection properties
 `hibernate.c3p0.min_size` or `c3p0.minPoolSize`:: The minimum size of the c3p0 pool. See http://www.mchange.com/projects/c3p0/#minPoolSize[c3p0 minPoolSize]
 `hibernate.c3p0.max_size` or `c3p0.maxPoolSize`:: The maximum size of the c3p0 pool. See http://www.mchange.com/projects/c3p0/#maxPoolSize[c3p0 maxPoolSize]
 `hibernate.c3p0.timeout` or `c3p0.maxIdleTime`:: The Connection idle time. See http://www.mchange.com/projects/c3p0/#maxIdleTime[c3p0 maxIdleTime]
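
A hedged configuration sketch for the c3p0 settings above (the keys are the documented ones; the pool sizes and timeout are arbitrary example values, and the application must also have the `hibernate-c3p0` module on its classpath):

    import java.util.Properties;

    public final class C3p0SettingsExample {
        public static Properties poolSettings() {
            Properties props = new Properties();
            props.setProperty( "hibernate.c3p0.min_size", "5" );   // c3p0 minPoolSize
            props.setProperty( "hibernate.c3p0.max_size", "20" );  // c3p0 maxPoolSize
            props.setProperty( "hibernate.c3p0.timeout", "300" );  // c3p0 maxIdleTime, in seconds
            return props;
        }
    }
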
@@ -28,11 +28,11 @@ ext {
    geolatteVersion = '1.4.0'

    // Wildfly version targeted by module ZIP; Arquillian/Shrinkwrap versions used for CDI testing and testing the module ZIP
-   wildflyVersion = '14.0.1.Final'
-   arquillianVersion = '1.4.0.Final'
+   wildflyVersion = '17.0.1.Final'
+   arquillianVersion = '1.4.1.Final'
    shrinkwrapVersion = '1.2.6'
    shrinkwrapDescriptorsVersion = '2.0.0'
-   wildflyArquillianContainerVersion = '2.1.1.Final'
+   wildflyArquillianContainerVersion = '2.2.0.Final'

    jodaTimeVersion = '2.3'

@@ -109,8 +109,7 @@ ext {
            hsqldb: "org.hsqldb:hsqldb:2.3.2",
            derby: "org.apache.derby:derby:10.11.1.1",
            postgresql: 'org.postgresql:postgresql:42.2.2',
-           //Upgrade MySQL Driver only when this issue gets fixed: https://bugs.mysql.com/bug.php?id=85941
-           mysql: 'mysql:mysql-connector-java:5.1.46',
+           mysql: 'mysql:mysql-connector-java:8.0.17',
            mariadb: 'org.mariadb.jdbc:mariadb-java-client:2.2.3',

            oracle: 'com.oracle.jdbc:ojdbc8:12.2.0.1',

@@ -159,7 +158,7 @@ ext {
            wildfly_arquillian_container_managed: "org.wildfly.arquillian:wildfly-arquillian-container-managed:${wildflyArquillianContainerVersion}",
            jboss_vfs: "org.jboss:jboss-vfs:3.2.11.Final",
            jipijapa_spi: "org.wildfly:jipijapa-spi:${wildflyVersion}",
-           wildfly_transaction_client : 'org.wildfly.transaction:wildfly-transaction-client:1.0.3.Final',
+           wildfly_transaction_client : 'org.wildfly.transaction:wildfly-transaction-client:1.1.7.Final',

            jboss_ejb_spec_jar : 'org.jboss.spec.javax.ejb:jboss-ejb-api_3.2_spec:1.0.0.Final',
            jboss_annotation_spec_jar : 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.0.Final'

Binary file not shown.

@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.2-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.3-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

@@ -99,6 +99,7 @@ import static org.hibernate.cfg.AvailableSettings.LOG_SESSION_METRICS;
 import static org.hibernate.cfg.AvailableSettings.MAX_FETCH_DEPTH;
 import static org.hibernate.cfg.AvailableSettings.MULTI_TENANT_IDENTIFIER_RESOLVER;
 import static org.hibernate.cfg.AvailableSettings.NATIVE_EXCEPTION_HANDLING_51_COMPLIANCE;
+import static org.hibernate.cfg.AvailableSettings.OMIT_JOIN_OF_SUPERCLASS_TABLES;
 import static org.hibernate.cfg.AvailableSettings.ORDER_INSERTS;
 import static org.hibernate.cfg.AvailableSettings.JPA_CALLBACKS_ENABLED;
 import static org.hibernate.cfg.AvailableSettings.ORDER_UPDATES;

@@ -212,6 +213,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
	private final boolean procedureParameterNullPassingEnabled;
	private final boolean collectionJoinSubqueryRewriteEnabled;
	private boolean jdbcStyleParamsZeroBased;
+	private final boolean omitJoinOfSuperclassTablesEnabled;

	// Caching
	private boolean secondLevelCacheEnabled;

@@ -360,6 +362,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
				CONVENTIONAL_JAVA_CONSTANTS, BOOLEAN, true );
		this.procedureParameterNullPassingEnabled = cfgService.getSetting( PROCEDURE_NULL_PARAM_PASSING, BOOLEAN, false );
		this.collectionJoinSubqueryRewriteEnabled = cfgService.getSetting( COLLECTION_JOIN_SUBQUERY, BOOLEAN, true );
+		this.omitJoinOfSuperclassTablesEnabled = cfgService.getSetting( OMIT_JOIN_OF_SUPERCLASS_TABLES, BOOLEAN, true );

		final RegionFactory regionFactory = serviceRegistry.getService( RegionFactory.class );
		if ( !NoCachingRegionFactory.class.isInstance( regionFactory ) ) {

@@ -1064,6 +1067,12 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
		return enhancementAsProxyEnabled;
	}

+	@Override
+	public boolean isOmitJoinOfSuperclassTablesEnabled() {
+		return omitJoinOfSuperclassTablesEnabled;
+	}
+
+
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// In-flight mutation access

@@ -15,12 +15,15 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.ServiceConfigurationError;
 import java.util.ServiceLoader;
 import java.util.Set;
 import java.util.function.Supplier;
 import java.util.stream.Stream;

 import org.hibernate.AssertionFailure;
+import org.hibernate.internal.CoreLogging;
+import org.hibernate.internal.CoreMessageLogger;

 /**
  * A service loader bound to an {@link AggregatedClassLoader}.

@@ -28,6 +31,8 @@ import org.hibernate.AssertionFailure;
  */
 abstract class AggregatedServiceLoader<S> {

+	private static final CoreMessageLogger log = CoreLogging.messageLogger( AggregatedServiceLoader.class );
+
	private static final Method SERVICE_LOADER_STREAM_METHOD;
	private static final Method PROVIDER_TYPE_METHOD;

@@ -145,17 +150,16 @@ abstract class AggregatedServiceLoader<S> {
	 * @param <S> The type of loaded services.
	 */
	private static class ClassPathAndModulePathAggregatedServiceLoader<S> extends AggregatedServiceLoader<S> {
		private final Class<S> serviceContract;
+		private final ServiceLoader<S> aggregatedClassLoaderServiceLoader;
		private final List<ServiceLoader<S>> delegates;
		private Collection<S> cache = null;

		private ClassPathAndModulePathAggregatedServiceLoader(AggregatedClassLoader aggregatedClassLoader,
				Class<S> serviceContract) {
			this.serviceContract = serviceContract;
			this.delegates = new ArrayList<>();
-			// Always try the aggregated class loader first
-			this.delegates.add( ServiceLoader.load( serviceContract, aggregatedClassLoader ) );
-
-			// Then also try the individual class loaders,
-			// because only they can instantiate services provided by jars in the module path
+			this.aggregatedClassLoaderServiceLoader = ServiceLoader.load( serviceContract, aggregatedClassLoader );
			final Iterator<ClassLoader> clIterator = aggregatedClassLoader.newClassLoaderIterator();
			while ( clIterator.hasNext() ) {
				this.delegates.add(

@@ -185,54 +189,94 @@ abstract class AggregatedServiceLoader<S> {
			return cache;
		}

-		@SuppressWarnings("unchecked")
		private Collection<S> loadAll() {
			Set<String> alreadyEncountered = new HashSet<>();
			Set<S> result = new LinkedHashSet<>();
-			delegates.stream()
-					// Each loader's stream() method returns a stream of service providers: flatten these into a single stream
-					.flatMap( delegate -> {
-						try {
-							return (Stream<? extends Supplier<S>>) SERVICE_LOADER_STREAM_METHOD.invoke( delegate );
-						}
-						catch (RuntimeException | IllegalAccessException | InvocationTargetException e) {
-							throw new AssertionFailure( "Error calling ServiceLoader.stream()", e );
-						}
-					} )
-					// For each provider, check its type to be sure we don't use a provider twice, then get the service
-					.forEach( provider -> {
-						Class<?> type;
-						try {
-							type = (Class<?>) PROVIDER_TYPE_METHOD.invoke( provider );
-						}
-						catch (RuntimeException | IllegalAccessException | InvocationTargetException e) {
-							throw new AssertionFailure( "Error calling ServiceLoader.Provider.type()", e );
-						}
-						String typeName = type.getName();
-						/*
-						 * We may encounter the same service provider multiple times,
-						 * because the individual class loaders may give access to the same types
-						 * (at the very least a single class loader may be present twice in the aggregated class loader).
-						 * However, we only want to get the service from each provider once.
-						 *
-						 * ServiceLoader.stream() is useful in that regard,
-						 * since it allows us to check the type of the service provider
-						 * before the service is even instantiated.
-						 *
-						 * We could just instantiate every service and check their type afterwards,
-						 * but 1. it would lead to unnecessary instantiation which could have side effects,
-						 * in particular regarding class loading,
-						 * and 2. the type of the provider may not always be the type of the service,
-						 * and one provider may return different types of services
-						 * depending on conditions known only to itself.
-						 */
-						if ( alreadyEncountered.add( typeName ) ) {
-							result.add( provider.get() );
-						}
-					} );
+
+			// Always try the aggregated class loader first
+			Iterator<? extends Supplier<S>> providerIterator = providerStream( aggregatedClassLoaderServiceLoader )
+					.iterator();
+			while ( providerIterator.hasNext() ) {
+				Supplier<S> provider = providerIterator.next();
+				collectServiceIfNotDuplicate( result, alreadyEncountered, provider );
+			}
+
+			/*
+			 * Then also try the individual class loaders,
+			 * because only they can instantiate services provided by jars in the module path.
+			 */
+			for ( ServiceLoader<S> delegate : delegates ) {
+				providerIterator = providerStream( delegate ).iterator();
+				/*
+				 * Note that advancing the stream itself can lead to (arguably) "legitimate" errors,
+				 * where we fail to load the service,
+				 * but only because an individual class loader has its own definition of the service contract class,
+				 * which is different from ours.
+				 * In that case (still arguably), the error should be ignored.
+				 * That's why we wrap the call to hasNext in a method that catches and logs errors.
+				 * See https://hibernate.atlassian.net/browse/HHH-13551.
+				 */
+				while ( hasNextIgnoringServiceConfigurationError( providerIterator ) ) {
+					Supplier<S> provider = providerIterator.next();
+					collectServiceIfNotDuplicate( result, alreadyEncountered, provider );
+				}
+			}

			return result;
		}

+		@SuppressWarnings("unchecked")
+		private Stream<? extends Supplier<S>> providerStream(ServiceLoader<S> serviceLoader) {
+			try {
+				return ( (Stream<? extends Supplier<S>>) SERVICE_LOADER_STREAM_METHOD.invoke( serviceLoader ) );
+			}
+			catch (RuntimeException | IllegalAccessException | InvocationTargetException e) {
+				throw new AssertionFailure( "Error calling ServiceLoader.stream()", e );
+			}
+		}
+
+		private boolean hasNextIgnoringServiceConfigurationError(Iterator<?> iterator) {
+			while ( true ) {
+				try {
+					return iterator.hasNext();
+				}
+				catch (ServiceConfigurationError e) {
+					log.ignoringServiceConfigurationError( serviceContract, e );
+				}
+			}
+		}
+
+		/*
+		 * We may encounter the same service provider multiple times,
+		 * because the individual class loaders may give access to the same types
+		 * (at the very least a single class loader may be present twice in the aggregated class loader).
+		 * However, we only want to get the service from each provider once.
+		 *
+		 * ServiceLoader.stream() is useful in that regard,
+		 * since it allows us to check the type of the service provider
+		 * before the service is even instantiated.
+		 *
+		 * We could just instantiate every service and check their type afterwards,
+		 * but 1. it would lead to unnecessary instantiation which could have side effects,
+		 * in particular regarding class loading,
+		 * and 2. the type of the provider may not always be the type of the service,
+		 * and one provider may return different types of services
+		 * depending on conditions known only to itself.
+		 */
+		private void collectServiceIfNotDuplicate(Set<S> result, Set<String> alreadyEncountered, Supplier<S> provider) {
+			Class<?> type;
+			try {
+				type = (Class<?>) PROVIDER_TYPE_METHOD.invoke( provider );
+			}
+			catch (RuntimeException | IllegalAccessException | InvocationTargetException e) {
+				throw new AssertionFailure( "Error calling ServiceLoader.Provider.type()", e );
+			}
+			String typeName = type.getName();
+			if ( alreadyEncountered.add( typeName ) ) {
+				result.add( provider.get() );
+			}
+		}
+
		@Override
		public void close() {
			cache = null;
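
The loader above invokes ServiceLoader.stream() reflectively (through SERVICE_LOADER_STREAM_METHOD) so the class still compiles on Java 8. On Java 9 and later the same de-duplication idea can be written directly against the API. The following self-contained sketch is illustrative only (the class and method names are invented); it shows the core technique: inspect each provider's type before instantiating, and instantiate each service at most once even when several class loaders expose the same provider:

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.ServiceLoader;
    import java.util.Set;

    public final class DedupServiceLoading {
        public static <S> List<S> loadUnique(Class<S> contract, List<ClassLoader> loaders) {
            Set<String> seen = new HashSet<>();
            List<S> services = new ArrayList<>();
            for ( ClassLoader cl : loaders ) {
                // stream() exposes providers without instantiating the services
                ServiceLoader.load( contract, cl ).stream().forEach( provider -> {
                    // provider.type() is the provider class; its name identifies duplicates
                    if ( seen.add( provider.type().getName() ) ) {
                        services.add( provider.get() ); // instantiate only the first occurrence
                    }
                } );
            }
            return services;
        }
    }
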
@@ -447,4 +447,9 @@ public class AbstractDelegatingSessionFactoryOptions implements SessionFactoryOp
	public boolean isEnhancementAsProxyEnabled() {
		return delegate.isEnhancementAsProxyEnabled();
	}
+
+	@Override
+	public boolean isOmitJoinOfSuperclassTablesEnabled() {
+		return delegate.isOmitJoinOfSuperclassTablesEnabled();
+	}
 }

@@ -314,4 +314,6 @@ public interface SessionFactoryOptions {
	default boolean isEnhancementAsProxyEnabled() {
		return false;
	}
+
+	boolean isOmitJoinOfSuperclassTablesEnabled();
 }

@@ -83,13 +83,13 @@ public class XmlMappingBinderAccess {
				xmlInputStream.close();
			}
			catch (IOException e) {
-				LOG.debugf( "Unable to close InputStream obtained from InputStreamAccess : " + xmlInputStreamAccess.getStreamName() );
+				LOG.debugf( "Unable to close InputStream obtained from InputStreamAccess : %s", xmlInputStreamAccess.getStreamName() );
			}
		}
	}

	public Binding bind(InputStream xmlInputStream) {
-		LOG.tracef( "reading mappings from InputStream" );
+		LOG.trace( "reading mappings from InputStream" );
		final Origin origin = new Origin( SourceType.INPUT_STREAM, null );
		return new InputStreamXmlSource( origin, xmlInputStream, false ).doBind( getMappingBinder() );
	}

@@ -20,10 +20,10 @@ import org.hibernate.bytecode.enhance.internal.tracker.SimpleFieldTracker;
 import org.hibernate.bytecode.enhance.spi.CollectionTracker;
 import org.hibernate.bytecode.enhance.spi.EnhancerConstants;
 import org.hibernate.bytecode.enhance.spi.interceptor.LazyAttributeLoadingInterceptor;
-import org.hibernate.engine.spi.ExtendedSelfDirtinessTracker;
-import org.hibernate.engine.spi.PersistentAttributeInterceptor;
 import org.hibernate.engine.spi.CompositeOwner;
 import org.hibernate.engine.spi.CompositeTracker;
+import org.hibernate.engine.spi.ExtendedSelfDirtinessTracker;
+import org.hibernate.engine.spi.PersistentAttributeInterceptor;

 import net.bytebuddy.asm.Advice;

@@ -358,9 +358,9 @@ class CodeTemplates {
		static void enter(@FieldValue Collection<?> field, @Advice.Argument(0) Collection<?> argument, @MappedBy String mappedBy) {
			if ( field != null && Hibernate.isPropertyInitialized( field, mappedBy ) ) {
				Object[] array = field.toArray();
-				for ( int i = 0; i < array.length; i++ ) {
-					if ( argument == null || !argument.contains( array[i] ) ) {
-						setterNull( array[i], null );
+				for ( Object array1 : array ) {
+					if ( argument == null || !argument.contains( array1 ) ) {
+						setterNull( array1, null );
					}
				}
			}

@@ -370,9 +370,9 @@ class CodeTemplates {
		static void exit(@Advice.This Object self, @Advice.Argument(0) Collection<?> argument, @MappedBy String mappedBy) {
			if ( argument != null && Hibernate.isPropertyInitialized( argument, mappedBy ) ) {
				Object[] array = argument.toArray();
-				for ( int i = 0; i < array.length; i++ ) {
-					if ( Hibernate.isPropertyInitialized( array[i], mappedBy ) && getter( array[i] ) != self ) {
-						setterSelf( array[i], self );
+				for ( Object array1 : array ) {
+					if ( Hibernate.isPropertyInitialized( array1, mappedBy ) && getter( array1 ) != self ) {
+						setterSelf( array1, self );
					}
				}
			}

@@ -399,9 +399,9 @@ class CodeTemplates {
		static void enter(@FieldValue Map<?, ?> field, @Advice.Argument(0) Map<?, ?> argument, @MappedBy String mappedBy) {
			if ( field != null && Hibernate.isPropertyInitialized( field, mappedBy ) ) {
				Object[] array = field.values().toArray();
-				for ( int i = 0; i < array.length; i++ ) {
-					if ( argument == null || !argument.values().contains( array[i] ) ) {
-						setterNull( array[i], null );
+				for ( Object array1 : array ) {
+					if ( argument == null || !argument.values().contains( array1 ) ) {
+						setterNull( array1, null );
					}
				}
			}

@@ -411,9 +411,9 @@ class CodeTemplates {
		static void exit(@Advice.This Object self, @Advice.Argument(0) Map<?, ?> argument, @MappedBy String mappedBy) {
			if ( argument != null && Hibernate.isPropertyInitialized( argument, mappedBy ) ) {
				Object[] array = argument.values().toArray();
-				for ( int i = 0; i < array.length; i++ ) {
-					if ( Hibernate.isPropertyInitialized( array[i], mappedBy ) && getter( array[i] ) != self ) {
-						setterSelf( array[i], self );
+				for ( Object array1 : array ) {
+					if ( Hibernate.isPropertyInitialized( array1, mappedBy ) && getter( array1 ) != self ) {
+						setterSelf( array1, self );
					}
				}
			}

@@ -467,9 +467,9 @@ class CodeTemplates {
		static void enter(@Advice.This Object self, @FieldValue Collection<?> field, @Advice.Argument(0) Collection<?> argument, @MappedBy String mappedBy) {
			if ( field != null && Hibernate.isPropertyInitialized( field, mappedBy ) ) {
				Object[] array = field.toArray();
-				for ( int i = 0; i < array.length; i++ ) {
-					if ( argument == null || !argument.contains( array[i] ) ) {
-						getter( array[i] ).remove( self );
+				for ( Object array1 : array ) {
+					if ( argument == null || !argument.contains( array1 ) ) {
+						getter( array1 ).remove( self );
					}
				}
			}

@@ -479,9 +479,9 @@ class CodeTemplates {
		static void exit(@Advice.This Object self, @Advice.Argument(0) Collection<?> argument, @MappedBy String mappedBy) {
			if ( argument != null && Hibernate.isPropertyInitialized( argument, mappedBy ) ) {
				Object[] array = argument.toArray();
-				for ( int i = 0; i < array.length; i++ ) {
-					if ( Hibernate.isPropertyInitialized( array[i], mappedBy ) ) {
-						Collection<Object> c = getter( array[i] );
+				for ( Object array1 : array ) {
+					if ( Hibernate.isPropertyInitialized( array1, mappedBy ) ) {
+						Collection<Object> c = getter( array1 );
						if ( c != self && c != null ) {
							c.add( self );
						}

@@ -8,7 +8,6 @@ package org.hibernate.bytecode.enhance.spi.interceptor;

 import java.util.Locale;
 import java.util.function.BiFunction;
-import java.util.function.Function;

 import org.hibernate.FlushMode;
 import org.hibernate.LazyInitializationException;

@@ -31,8 +30,7 @@ public class EnhancementHelper {
	public static boolean includeInBaseFetchGroup(
			Property bootMapping,
			boolean isEnhanced,
-			boolean allowEnhancementAsProxy,
-			Function<String,Boolean> hasSubclassChecker) {
+			boolean allowEnhancementAsProxy) {
		final Value value = bootMapping.getValue();

		if ( ! isEnhanced ) {

@@ -57,7 +55,6 @@ public class EnhancementHelper {
			}
			// include it in the base fetch group so long as the config allows
			// using the FK to create an "enhancement proxy"
-			// return allowEnhancementAsProxy && hasSubclassChecker.apply( toOne.getReferencedEntityName() );
			return allowEnhancementAsProxy;
		}

@@ -16,7 +16,6 @@ import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.function.Function;

 import org.hibernate.mapping.PersistentClass;
 import org.hibernate.mapping.Property;

@@ -34,8 +33,7 @@ public class LazyAttributesMetadata implements Serializable {
	public static LazyAttributesMetadata from(
			PersistentClass mappedEntity,
			boolean isEnhanced,
-			boolean allowEnhancementAsProxy,
-			Function<String,Boolean> hasSubclassChecker) {
+			boolean allowEnhancementAsProxy) {
		final Map<String, LazyAttributeDescriptor> lazyAttributeDescriptorMap = new LinkedHashMap<>();
		final Map<String, Set<String>> fetchGroupToAttributesMap = new HashMap<>();

@@ -48,8 +46,7 @@ public class LazyAttributesMetadata implements Serializable {
			final boolean lazy = ! EnhancementHelper.includeInBaseFetchGroup(
					property,
					isEnhanced,
-					allowEnhancementAsProxy,
-					hasSubclassChecker
+					allowEnhancementAsProxy
			);
			if ( lazy ) {
				final LazyAttributeDescriptor lazyAttributeDescriptor = LazyAttributeDescriptor.from( property, i, x++ );

@@ -111,6 +111,7 @@ public class CollectionCacheInvalidator
			return;
		}
		final EntityMetamodel entityMetamodel = persister.getEntityMetamodel();
+		final boolean debugEnabled = LOG.isDebugEnabled();
		for ( String role : collectionRoles ) {
			final CollectionPersister collectionPersister = metamodel.collectionPersister( role );
			if ( !collectionPersister.hasCache() ) {

@@ -142,7 +143,9 @@ public class CollectionCacheInvalidator
				}
			}
			else {
-				LOG.debug( "Evict CollectionRegion " + role );
+				if ( debugEnabled ) {
+					LOG.debug( "Evict CollectionRegion " + role );
+				}
				final CollectionDataAccess cacheAccessStrategy = collectionPersister.getCacheAccessStrategy();
				final SoftLock softLock = cacheAccessStrategy.lockRegion();
				session.getActionQueue().registerProcess( (success, session1) -> {
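
The change above hoists the "is debug logging enabled?" check out of the loop so the message string is only concatenated when it will actually be logged. A self-contained sketch of the same pattern (names are illustrative; java.util.logging stands in for Hibernate's logger here):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public final class GuardedLoggingExample {
        private static final Logger LOG = Logger.getLogger( GuardedLoggingExample.class.getName() );

        static void evictAll(Iterable<String> roles) {
            // Evaluate once, outside the loop.
            final boolean debugEnabled = LOG.isLoggable( Level.FINE );
            for ( String role : roles ) {
                if ( debugEnabled ) {
                    // Concatenation happens only when the message will be emitted.
                    LOG.fine( "Evict CollectionRegion " + role );
                }
                // ... eviction work for this role ...
            }
        }
    }
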
@@ -48,6 +48,7 @@ import org.hibernate.pretty.MessageHelper;
 /**
  * @author Steve Ebersole
  * @author Strong Liu
+ * @author Gail Badner
  */
 public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildingContext {
	private static final CoreMessageLogger LOG = CoreLogging.messageLogger( EnabledCaching.class );

@@ -57,6 +58,10 @@ public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildin

	private final Map<String,Region> regionsByName = new ConcurrentHashMap<>();

+	// A map by name for QueryResultsRegion instances that have the same name as a Region
+	// in #regionsByName.
+	private final Map<String, QueryResultsRegion> queryResultsRegionsByDuplicateName = new ConcurrentHashMap<>();
+
	private final Map<NavigableRole,EntityDataAccess> entityAccessMap = new ConcurrentHashMap<>();
	private final Map<NavigableRole,NaturalIdDataAccess> naturalIdAccessMap = new ConcurrentHashMap<>();
	private final Map<NavigableRole,CollectionDataAccess> collectionAccessMap = new ConcurrentHashMap<>();

@@ -204,6 +209,8 @@ public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildin

	@Override
	public Region getRegion(String regionName) {
+		// The Region in regionsByName has precedence over the
+		// QueryResultsRegion in #queryResultsRegionsByDuplicateName
		return regionsByName.get( regionName );
	}

@@ -488,12 +495,23 @@ public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildin
	}

	protected QueryResultsCache makeQueryResultsRegionAccess(String regionName) {
-		final QueryResultsRegion region = (QueryResultsRegion) regionsByName.computeIfAbsent(
+		final Region region = regionsByName.computeIfAbsent(
				regionName,
				this::makeQueryResultsRegion
		);
+		final QueryResultsRegion queryResultsRegion;
+		if ( QueryResultsRegion.class.isInstance( region ) ) {
+			queryResultsRegion = (QueryResultsRegion) region;
+		}
+		else {
+			// There was already a different type of Region with the same name.
+			queryResultsRegion = queryResultsRegionsByDuplicateName.computeIfAbsent(
+					regionName,
+					this::makeQueryResultsRegion
+			);
+		}
		final QueryResultsCacheImpl regionAccess = new QueryResultsCacheImpl(
-				region,
+				queryResultsRegion,
				timestampsCache
		);
		namedQueryResultsCacheMap.put( regionName, regionAccess );

@@ -502,20 +520,9 @@ public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildin
	}

	protected QueryResultsRegion makeQueryResultsRegion(String regionName) {
-		// make sure there is not an existing domain-data region with that name..
-		final Region existing = regionsByName.get( regionName );
-		if ( existing != null ) {
-			if ( !QueryResultsRegion.class.isInstance( existing ) ) {
-				throw new IllegalStateException( "Cannot store both domain-data and query-result-data in the same region [" + regionName );
-			}
-
-			throw new IllegalStateException( "Illegal call to create QueryResultsRegion - one already existed" );
-		}
-
		return regionFactory.buildQueryResultsRegion( regionName, getSessionFactory() );
	}

	@Override
	public Set<String> getCacheRegionNames() {
		return regionsByName.keySet();

@@ -524,6 +531,10 @@ public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildin
	@Override
	public void evictRegion(String regionName) {
		getRegion( regionName ).clear();
+		final QueryResultsRegion queryResultsRegionWithDuplicateName = queryResultsRegionsByDuplicateName.get( regionName );
+		if ( queryResultsRegionWithDuplicateName != null ) {
+			queryResultsRegionWithDuplicateName.clear();
+		}
	}

	@Override

@@ -545,6 +556,9 @@ public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildin
		for ( Region region : regionsByName.values() ) {
			region.destroy();
		}
+		for ( Region region : queryResultsRegionsByDuplicateName.values() ) {
+			region.destroy();
+		}
	}

@@ -52,7 +52,9 @@ public interface CacheImplementor extends Service, Cache, org.hibernate.engine.s
	void prime(Set<DomainDataRegionConfig> cacheRegionConfigs);

	/**
-	 * Get a cache Region by name
+	 * Get a cache Region by name. If there is both a {@link DomainDataRegion}
+	 * and a {@link QueryResultsRegion} with the specified name, then the
+	 * {@link DomainDataRegion} will be returned.
	 *
	 * @apiNote It is only valid to call this method after {@link #prime} has
	 * been performed
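
A toy model of the bookkeeping that this javadoc (and the EnabledCaching change above) describes. This is not Hibernate API; the interface and maps below are invented to illustrate the rule that a domain-data region wins on name lookup, while a same-named query-results region is parked in a side map that eviction must also touch:

    import java.util.HashMap;
    import java.util.Map;

    public final class RegionLookupModel {
        interface Region { void clear(); }

        private final Map<String, Region> regionsByName = new HashMap<>();
        private final Map<String, Region> queryResultsRegionsByDuplicateName = new HashMap<>();

        Region getRegion(String name) {
            // The domain-data region has precedence when both share a name.
            return regionsByName.get( name );
        }

        void evictRegion(String name) {
            // Eviction clears the named region and any same-named
            // query-results region held in the side map.
            Region region = regionsByName.get( name );
            if ( region != null ) {
                region.clear();
            }
            Region duplicate = queryResultsRegionsByDuplicateName.get( name );
            if ( duplicate != null ) {
                duplicate.clear();
            }
        }
    }
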
@@ -7,8 +7,9 @@
 package org.hibernate.cache.spi.support;

 import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;

 import org.hibernate.cache.CacheException;
 import org.hibernate.cache.cfg.spi.CollectionDataCachingConfig;

@@ -121,15 +122,16 @@ public abstract class AbstractDomainDataRegion extends AbstractRegion implements

	private Map<NavigableRole, EntityDataAccess> generateEntityDataAccessMap(
			DomainDataRegionConfig regionConfig) {
-		if ( regionConfig.getEntityCaching().isEmpty() ) {
+		final List<EntityDataCachingConfig> entityCaching = regionConfig.getEntityCaching();
+		if ( entityCaching.isEmpty() ) {
			return Collections.emptyMap();
		}

-		final Map<NavigableRole, EntityDataAccess> accessMap = new ConcurrentHashMap<>();
-		for ( EntityDataCachingConfig entityAccessConfig : regionConfig.getEntityCaching() ) {
-			accessMap.computeIfAbsent(
+		final Map<NavigableRole, EntityDataAccess> accessMap = new HashMap<>( entityCaching.size() );
+		for ( EntityDataCachingConfig entityAccessConfig : entityCaching ) {
+			accessMap.put(
					entityAccessConfig.getNavigableRole(),
-					hierarchy -> generateEntityAccess( entityAccessConfig )
+					generateEntityAccess( entityAccessConfig )
			);
		}

@@ -137,15 +139,16 @@ public abstract class AbstractDomainDataRegion extends AbstractRegion implements
	}

	private Map<NavigableRole, NaturalIdDataAccess> generateNaturalIdDataAccessMap(DomainDataRegionConfig regionConfig) {
-		if ( regionConfig.getNaturalIdCaching().isEmpty() ) {
+		final List<NaturalIdDataCachingConfig> naturalIdCaching = regionConfig.getNaturalIdCaching();
+		if ( naturalIdCaching.isEmpty() ) {
			return Collections.emptyMap();
		}

-		final Map<NavigableRole, NaturalIdDataAccess> accessMap = new ConcurrentHashMap<>();
-		for ( NaturalIdDataCachingConfig naturalIdAccessConfig : regionConfig.getNaturalIdCaching() ) {
-			accessMap.computeIfAbsent(
+		final Map<NavigableRole, NaturalIdDataAccess> accessMap = new HashMap<>( naturalIdCaching.size() );
+		for ( NaturalIdDataCachingConfig naturalIdAccessConfig : naturalIdCaching ) {
+			accessMap.put(
					naturalIdAccessConfig.getNavigableRole(),
-					hierarchy -> generateNaturalIdAccess( naturalIdAccessConfig )
+					generateNaturalIdAccess( naturalIdAccessConfig )
			);
		}

@@ -154,15 +157,16 @@ public abstract class AbstractDomainDataRegion extends AbstractRegion implements

	private Map<NavigableRole, CollectionDataAccess> generateCollectionDataAccessMap(
			DomainDataRegionConfig regionConfig) {
-		if ( regionConfig.getCollectionCaching().isEmpty() ) {
+		final List<CollectionDataCachingConfig> collectionCaching = regionConfig.getCollectionCaching();
+		if ( collectionCaching.isEmpty() ) {
			return Collections.emptyMap();
		}

-		final Map<NavigableRole, CollectionDataAccess> accessMap = new ConcurrentHashMap<>();
-		for ( CollectionDataCachingConfig cachingConfig : regionConfig.getCollectionCaching() ) {
-			accessMap.computeIfAbsent(
+		final Map<NavigableRole, CollectionDataAccess> accessMap = new HashMap<>( collectionCaching.size() );
+		for ( CollectionDataCachingConfig cachingConfig : collectionCaching ) {
+			accessMap.put(
					cachingConfig.getNavigableRole(),
-					hierarchy -> generateCollectionAccess( cachingConfig )
+					generateCollectionAccess( cachingConfig )
			);
		}

@@ -2060,4 +2060,19 @@ public interface AvailableSettings extends org.hibernate.jpa.AvailableSettings {
	 * @since 5.4
	 */
	String SEQUENCE_INCREMENT_SIZE_MISMATCH_STRATEGY = "hibernate.id.sequence.increment_size_mismatch_strategy";
+
+	/**
+	 * <p>
+	 * When you use the {@link javax.persistence.InheritanceType#JOINED} strategy for inheritance mapping and query
+	 * a value from an entity, all superclass tables are joined in the query regardless of whether you need them. With
+	 * this setting set to true, only the superclass tables that are really needed are joined.
+	 * </p>
+	 * <p>
+	 * The default value is true.
+	 * </p>
+	 *
+	 * @since 5.4
+	 */
+	String OMIT_JOIN_OF_SUPERCLASS_TABLES = "hibernate.query.omit_join_of_superclass_tables";
+
 }
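
A hedged usage sketch for the setting added above. The key string comes from the commit; the bootstrap calls are standard Hibernate API, and setting the value to "false" would restore the old behavior of always joining every superclass table:

    import org.hibernate.boot.registry.StandardServiceRegistry;
    import org.hibernate.boot.registry.StandardServiceRegistryBuilder;

    public final class OmitSuperclassJoinConfigExample {
        public static StandardServiceRegistry registryWithOptimizationDisabled() {
            return new StandardServiceRegistryBuilder()
                    .applySetting( "hibernate.query.omit_join_of_superclass_tables", "false" )
                    .build();
        }
    }
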
@@ -7,6 +7,7 @@
 package org.hibernate.cfg;

 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;

 import org.hibernate.HibernateException;

@@ -17,6 +18,9 @@ import org.hibernate.engine.internal.StatisticalLoggingSessionEventListener;
  * @author Steve Ebersole
  */
 public class BaselineSessionEventsListenerBuilder {

+	private static final SessionEventListener[] EMPTY = new SessionEventListener[0];
+
	private boolean logSessionMetrics;
	private Class<? extends SessionEventListener> autoListener;

@@ -33,6 +37,10 @@ public class BaselineSessionEventsListenerBuilder {
	}

	@SuppressWarnings("UnusedDeclaration")
+	/**
+	 * @deprecated this method will be removed as this builder should become immutable
+	 */
+	@Deprecated
	public void setLogSessionMetrics(boolean logSessionMetrics) {
		this.logSessionMetrics = logSessionMetrics;
	}

@@ -43,26 +51,59 @@ public class BaselineSessionEventsListenerBuilder {
	}

	@SuppressWarnings("UnusedDeclaration")
+	/**
+	 * @deprecated this method will be removed as this builder should become immutable
+	 */
+	@Deprecated
	public void setAutoListener(Class<? extends SessionEventListener> autoListener) {
		this.autoListener = autoListener;
	}

	public List<SessionEventListener> buildBaselineList() {
-		List<SessionEventListener> list = new ArrayList<SessionEventListener>();
-		if ( logSessionMetrics && StatisticalLoggingSessionEventListener.isLoggingEnabled() ) {
-			list.add( new StatisticalLoggingSessionEventListener() );
-		}
-		if ( autoListener != null ) {
-			try {
-				list.add( autoListener.newInstance() );
-			}
-			catch (Exception e) {
-				throw new HibernateException(
-						"Unable to instantiate specified auto SessionEventListener : " + autoListener.getName(),
-						e
-				);
-			}
-		}
+		final SessionEventListener[] sessionEventListeners = buildBaseline();
+		//Capacity: needs to hold at least all elements from the baseline, but also expect to add a little more later.
+		ArrayList<SessionEventListener> list = new ArrayList<>( sessionEventListeners.length + 3 );
+		Collections.addAll( list, sessionEventListeners );
		return list;
	}

+	public SessionEventListener[] buildBaseline() {
+		final boolean addStats = logSessionMetrics && StatisticalLoggingSessionEventListener.isLoggingEnabled();
+		final boolean addAutoListener = autoListener != null;
+		final SessionEventListener[] arr;
+		if ( addStats && addAutoListener ) {
+			arr = new SessionEventListener[2];
+			arr[0] = buildStatsListener();
+			arr[1] = buildAutoListener( autoListener );
+		}
+		else if ( !addStats && !addAutoListener ) {
+			arr = EMPTY;
+		}
+		else if ( !addStats && addAutoListener ) {
+			arr = new SessionEventListener[1];
+			arr[0] = buildAutoListener( autoListener );
+		}
+		else { //Last case: if ( addStats && !addAutoListener )
+			arr = new SessionEventListener[1];
+			arr[0] = buildStatsListener();
+		}
+		return arr;
+	}
+
+	private static SessionEventListener buildAutoListener(final Class<? extends SessionEventListener> autoListener) {
+		try {
+			return autoListener.newInstance();
+		}
+		catch (Exception e) {
+			throw new HibernateException(
+					"Unable to instantiate specified auto SessionEventListener : " + autoListener.getName(),
+					e
+			);
+		}
+	}
+
+	private static SessionEventListener buildStatsListener() {
+		return new StatisticalLoggingSessionEventListener();
+	}
+
 }

@@ -1124,19 +1124,19 @@ public class BinderHelper {

	public static Map<String,String> toAliasTableMap(SqlFragmentAlias[] aliases){
		Map<String,String> ret = new HashMap<>();
-		for ( int i = 0; i < aliases.length; i++ ) {
-			if ( StringHelper.isNotEmpty( aliases[i].table() ) ) {
-				ret.put( aliases[i].alias(), aliases[i].table() );
-			}
+		for ( SqlFragmentAlias aliase : aliases ) {
+			if ( StringHelper.isNotEmpty( aliase.table() ) ) {
+				ret.put( aliase.alias(), aliase.table() );
+			}
		}
		return ret;
	}

	public static Map<String,String> toAliasEntityMap(SqlFragmentAlias[] aliases){
		Map<String,String> ret = new HashMap<>();
-		for ( int i = 0; i < aliases.length; i++ ) {
-			if ( aliases[i].entity() != void.class ) {
-				ret.put( aliases[i].alias(), aliases[i].entity().getName() );
+		for ( SqlFragmentAlias aliase : aliases ) {
+			if ( aliase.entity() != void.class ) {
+				ret.put( aliase.alias(), aliase.entity().getName() );
			}
		}
		return ret;

@@ -247,8 +247,8 @@ public abstract class ExternalSessionFactoryConfig {


		String[] mappingFiles = ConfigurationHelper.toStringArray( mapResources, " ,\n\t\r\f" );
-		for ( int i = 0; i < mappingFiles.length; i++ ) {
-			cfg.addResource( mappingFiles[i] );
+		for ( String mappingFile : mappingFiles ) {
+			cfg.addResource( mappingFile );
		}

		return cfg;

@@ -66,8 +66,8 @@ public class IndexOrUniqueKeySecondPass implements SecondPass {
	@Override
	public void doSecondPass(Map persistentClasses) throws MappingException {
		if ( columns != null ) {
-			for ( int i = 0; i < columns.length; i++ ) {
-				addConstraintToColumn( columns[i] );
+			for ( String column1 : columns ) {
+				addConstraintToColumn( column1 );
			}
		}
		if ( column != null ) {

@@ -76,7 +76,7 @@ public final class Settings {
		LOG.debugf( "Check Nullability in Core (should be disabled when Bean Validation is on): %s", enabledDisabled( sessionFactoryOptions.isCheckNullability() ) );
		LOG.debugf( "Allow initialization of lazy state outside session : %s", enabledDisabled( sessionFactoryOptions.isInitializeLazyStateOutsideTransactionsEnabled() ) );

-		LOG.debugf( "Using BatchFetchStyle : " + sessionFactoryOptions.getBatchFetchStyle().name() );
+		LOG.debugf( "Using BatchFetchStyle : %s", sessionFactoryOptions.getBatchFetchStyle().name() );
		LOG.debugf( "Default batch fetch size: %s", sessionFactoryOptions.getDefaultBatchFetchSize() );
		LOG.debugf( "Maximum outer join fetch depth: %s", sessionFactoryOptions.getMaximumFetchDepth() );
		LOG.debugf( "Default null ordering: %s", sessionFactoryOptions.getDefaultNullPrecedence() );

@@ -9,7 +9,9 @@ package org.hibernate.dialect;
 import java.sql.Types;

 import org.hibernate.dialect.function.SQLFunction;
+import org.hibernate.dialect.function.StandardSQLFunction;
 import org.hibernate.dialect.function.StaticPrecisionFspTimestampFunction;
+import org.hibernate.type.StandardBasicTypes;

 /**
  * @author Gail Badner

@@ -64,6 +66,11 @@ public class MySQL57Dialect extends MySQL55Dialect {

		// from_unixtime(), timestamp() are functions that return TIMESTAMP that do not support a
		// fractional seconds precision argument (so there's no need to override them here):

+		registerFunction( "weight_string", new StandardSQLFunction( "weight_string", StandardBasicTypes.STRING ) );
+
+		registerFunction( "to_base64", new StandardSQLFunction( "to_base64", StandardBasicTypes.STRING ) );
+		registerFunction( "from_base64", new StandardSQLFunction( "from_base64", StandardBasicTypes.STRING ) );
+
	}

	/**

@@ -7,6 +7,8 @@
 package org.hibernate.dialect;

 import org.hibernate.LockOptions;
+import org.hibernate.dialect.function.StandardSQLFunction;
+import org.hibernate.type.StandardBasicTypes;

 /**
  * @author Vlad Mihalcea

@@ -33,6 +35,10 @@ public class MySQL8Dialect extends MySQL57Dialect {
		registerKeyword("PERSIST_ONLY");
		registerKeyword("RANK");
		registerKeyword("ROW_NUMBER");
+
+		registerFunction( "regexp_replace", new StandardSQLFunction( "regexp_replace", StandardBasicTypes.STRING ) );
+		registerFunction( "regexp_instr", new StandardSQLFunction( "regexp_instr", StandardBasicTypes.INTEGER ) );
+		registerFunction( "regexp_substr", new StandardSQLFunction( "regexp_substr", StandardBasicTypes.STRING ) );
	}

	@Override

@@ -431,4 +431,9 @@ public class SybaseASE15Dialect extends SybaseDialect {
	public boolean supportsLockTimeouts() {
		return false;
	}
+
+	@Override
+	public boolean supportsPartitionBy() {
+		return false;
+	}
 }

@@ -73,17 +73,12 @@ public class PessimisticReadSelectLockingStrategy extends AbstractSelectLockingS
			}

			final ResultSet rs = jdbcCoordinator.getResultSetReturn().extract( st );
-			try {
-				if ( !rs.next() ) {
-					final StatisticsImplementor statistics = factory.getStatistics();
-					if ( statistics.isStatisticsEnabled() ) {
-						statistics.optimisticFailure( lockable.getEntityName() );
-					}
-					throw new StaleObjectStateException( lockable.getEntityName(), id );
+			if ( !rs.next() ) {
+				final StatisticsImplementor statistics = factory.getStatistics();
+				if ( statistics.isStatisticsEnabled() ) {
+					statistics.optimisticFailure( lockable.getEntityName() );
				}
-			}
-			finally {
-				jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( rs, st );
+				throw new StaleObjectStateException( lockable.getEntityName(), id );
			}
		}
		finally {

@@ -9,6 +9,7 @@ package org.hibernate.engine.internal;
 import java.io.IOException;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
+import java.util.function.Supplier;

 import org.hibernate.AssertionFailure;
 import org.hibernate.CustomEntityDirtinessStrategy;

@@ -309,7 +310,7 @@ public abstract class AbstractEntityEntry implements Serializable, EntityEntry {
			return !isExistsInDatabase();
		}
		else {
-			return session.getPersistenceContextInternal().getNullifiableEntityKeys().contains( getEntityKey() );
+			return session.getPersistenceContextInternal().containsNullifiableEntityKey( this::getEntityKey );
		}
	}

@@ -160,7 +160,7 @@ public final class ForeignKeys {
		if ( isDelete &&
				value == LazyPropertyInitializer.UNFETCHED_PROPERTY &&
				type.isEntityType() &&
-				!session.getPersistenceContextInternal().getNullifiableEntityKeys().isEmpty() ) {
+				!session.getPersistenceContextInternal().isNullifiableEntityKeysEmpty() ) {
			// IMPLEMENTATION NOTE: If cascade-remove was mapped for the attribute,
			// then value should have been initialized previously, when the remove operation was
			// cascaded to the property (because CascadingAction.DELETE.performOnLazyProperty()

@@ -47,6 +47,7 @@ public class JoinSequence {
	private Selector selector;
	private JoinSequence next;
	private boolean isFromPart;
+	private Set<String> queryReferencedTables;

	/**
	 * Constructs a JoinSequence

@@ -466,7 +467,7 @@ public class JoinSequence {
			Set<String> treatAsDeclarations) {
		final boolean include = includeSubclassJoins && isIncluded( alias );
		joinFragment.addJoins(
-				joinable.fromJoinFragment( alias, innerJoin, include, treatAsDeclarations ),
+				joinable.fromJoinFragment( alias, innerJoin, include, treatAsDeclarations, queryReferencedTables ),
				joinable.whereJoinFragment( alias, innerJoin, include, treatAsDeclarations )
		);
	}

@@ -573,6 +574,15 @@ public class JoinSequence {
		return useThetaStyle;
	}

+	/**
+	 * Set all tables the query refers to. This allows the query to be optimized.
+	 *
+	 * @param queryReferencedTables the set of table names referenced by the query
+	 */
+	public void setQueryReferencedTables(Set<String> queryReferencedTables) {
+		this.queryReferencedTables = queryReferencedTables;
+	}
+
	public Join getFirstJoin() {
		return joins.get( 0 );
	}

@ -7,8 +7,8 @@
|
|||
package org.hibernate.engine.internal;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.hibernate.SessionEventListener;
|
||||
import org.hibernate.engine.spi.SessionEventListenerManager;
|
||||
|
@ -17,255 +17,269 @@ import org.hibernate.engine.spi.SessionEventListenerManager;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public class SessionEventListenerManagerImpl implements SessionEventListenerManager, Serializable {
|
||||
private List<SessionEventListener> listenerList;
|
||||
|
||||
private SessionEventListener[] listeners;
|
||||
|
||||
public SessionEventListenerManagerImpl(SessionEventListener... initialListener) {
|
||||
//no need for defensive copies until the array is mutated:
|
||||
this.listeners = initialListener;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addListener(SessionEventListener... listeners) {
|
||||
if ( listenerList == null ) {
|
||||
listenerList = new ArrayList<>();
|
||||
public void addListener(final SessionEventListener... additionalListeners) {
|
||||
Objects.requireNonNull( additionalListeners );
|
||||
final SessionEventListener[] existing = this.listeners;
|
||||
if ( existing == null ) {
|
||||
//Make a defensive copy as this array can be tracked back to API (user code)
|
||||
this.listeners = Arrays.copyOf( additionalListeners, additionalListeners.length );
|
||||
}
|
||||
else {
|
||||
// Resize our existing array and add the new listeners
|
||||
final SessionEventListener[] newlist = new SessionEventListener[ existing.length + additionalListeners.length ];
|
||||
System.arraycopy( existing, 0, newlist, 0, existing.length );
|
||||
System.arraycopy( additionalListeners, 0, newlist, existing.length, additionalListeners.length );
|
||||
this.listeners = newlist;
|
||||
}
|
||||
|
||||
java.util.Collections.addAll( listenerList, listeners );
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transactionCompletion(boolean successful) {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.transactionCompletion( successful );
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcConnectionAcquisitionStart() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcConnectionAcquisitionStart();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcConnectionAcquisitionEnd() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcConnectionAcquisitionEnd();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcConnectionReleaseStart() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcConnectionReleaseStart();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcConnectionReleaseEnd() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcConnectionReleaseEnd();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcPrepareStatementStart() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcPrepareStatementStart();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcPrepareStatementEnd() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcPrepareStatementEnd();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcExecuteStatementStart() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcExecuteStatementStart();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcExecuteStatementEnd() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
|
||||
listener.jdbcExecuteStatementEnd();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jdbcExecuteBatchStart() {
|
||||
if ( listenerList == null ) {
|
||||
if ( listeners == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
for ( SessionEventListener listener : listenerList ) {
|
||||
for ( SessionEventListener listener : listeners ) {
listener.jdbcExecuteBatchStart();
}
}

@Override
public void jdbcExecuteBatchEnd() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.jdbcExecuteBatchEnd();
}
}

@Override
public void cachePutStart() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.cachePutStart();
}
}

@Override
public void cachePutEnd() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.cachePutEnd();
}
}

@Override
public void cacheGetStart() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.cacheGetStart();
}
}

@Override
public void cacheGetEnd(boolean hit) {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.cacheGetEnd( hit );
}
}

@Override
public void flushStart() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.flushStart();
}
}

@Override
public void flushEnd(int numberOfEntities, int numberOfCollections) {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.flushEnd( numberOfEntities, numberOfCollections );
}
}

@Override
public void partialFlushStart() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.partialFlushStart();
}
}

@Override
public void partialFlushEnd(int numberOfEntities, int numberOfCollections) {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.partialFlushEnd( numberOfEntities, numberOfCollections );
}
}

@Override
public void dirtyCalculationStart() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.dirtyCalculationStart();
}
}

@Override
public void dirtyCalculationEnd(boolean dirty) {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.dirtyCalculationEnd( dirty );
}
}

@Override
public void end() {
if ( listenerList == null ) {
if ( listeners == null ) {
return;
}

for ( SessionEventListener listener : listenerList ) {
for ( SessionEventListener listener : listeners ) {
listener.end();
}
}
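Note on the pattern above: every callback bails out while the lazily created listener list is still null and otherwise multicasts to each registered listener. A minimal self-contained sketch of the idea, with invented names (`LazyListenerMulticaster`, plain `Runnable` callbacks) standing in for the real listener types:

import java.util.ArrayList;
import java.util.List;

// Illustration only: no list is allocated until the first listener registers,
// and every event method tolerates the never-registered (null) state.
final class LazyListenerMulticaster {
	private List<Runnable> listeners; // null until first registration

	void addListener(Runnable listener) {
		if ( listeners == null ) {
			listeners = new ArrayList<>();
		}
		listeners.add( listener );
	}

	void fire() {
		if ( listeners == null ) {
			return; // nothing registered: no iteration, no allocation
		}
		for ( Runnable listener : listeners ) {
			listener.run();
		}
	}
}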
@ -13,6 +13,7 @@ import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;

@ -21,6 +22,8 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentMap;
import java.util.function.BiConsumer;
import java.util.function.Supplier;

import org.hibernate.AssertionFailure;
import org.hibernate.Hibernate;

@ -58,6 +61,7 @@ import org.hibernate.event.spi.EventSource;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.ConcurrentReferenceHashMap;
import org.hibernate.internal.util.collections.IdentityMap;
import org.hibernate.metamodel.spi.MetamodelImplementor;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;

@ -78,6 +82,7 @@ import org.jboss.logging.Logger;
* their processing.
*
* @author Steve Ebersole
* @author Sanne Grinovero
*/
public class StatefulPersistenceContext implements PersistenceContext {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(

@ -96,7 +101,6 @@ public class StatefulPersistenceContext implements PersistenceContext {
private Map<EntityUniqueKey, Object> entitiesByUniqueKey;

private EntityEntryContext entityEntryContext;
// private Map<Object,EntityEntry> entityEntries;

// Entity proxies, by EntityKey
private ConcurrentMap<EntityKey, Object> proxiesByKey;
@ -153,30 +157,25 @@ public class StatefulPersistenceContext implements PersistenceContext {
this.session = session;

entitiesByKey = new HashMap<>( INIT_COLL_SIZE );
entitiesByUniqueKey = new HashMap<>( INIT_COLL_SIZE );
//noinspection unchecked
proxiesByKey = new ConcurrentReferenceHashMap<>(
INIT_COLL_SIZE,
.75f,
1,
ConcurrentReferenceHashMap.ReferenceType.STRONG,
ConcurrentReferenceHashMap.ReferenceType.WEAK,
null
);
entitySnapshotsByKey = new HashMap<>( INIT_COLL_SIZE );

entityEntryContext = new EntityEntryContext( this );
// entityEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
collectionEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
parentsByChild = new IdentityHashMap<>( INIT_COLL_SIZE );

collectionsByKey = new HashMap<>( INIT_COLL_SIZE );
arrayHolders = new IdentityHashMap<>( INIT_COLL_SIZE );
}

nullifiableEntityKeys = new HashSet<>();

nullAssociations = new HashSet<>( INIT_COLL_SIZE );
nonlazyCollections = new ArrayList<>( INIT_COLL_SIZE );
private ConcurrentMap<EntityKey, Object> getOrInitializeProxiesByKey() {
if ( proxiesByKey == null ) {
//noinspection unchecked
proxiesByKey = new ConcurrentReferenceHashMap<>(
INIT_COLL_SIZE,
.75f,
1,
ConcurrentReferenceHashMap.ReferenceType.STRONG,
ConcurrentReferenceHashMap.ReferenceType.WEAK,
null
);
}
return proxiesByKey;
}

@Override
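The constructor no longer eagerly allocates `proxiesByKey`; the new `getOrInitializeProxiesByKey()` accessor creates the map on first write instead. The same get-or-initialize idiom recurs throughout this class; a reduced sketch under assumed, simplified names:

import java.util.HashMap;
import java.util.Map;

// Illustration of the idiom: write paths go through the accessor,
// read paths tolerate the still-null field to avoid useless allocation.
final class LazyMapHolder<K, V> {
	private Map<K, V> map; // intentionally null until needed

	Map<K, V> getOrInitializeMap() {
		if ( map == null ) {
			map = new HashMap<>( 8 );
		}
		return map;
	}

	V lookup(K key) {
		return map == null ? null : map.get( key );
	}
}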
@ -199,7 +198,7 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public void addUnownedCollection(CollectionKey key, PersistentCollection collection) {
if (unownedCollections==null) {
if ( unownedCollections == null ) {
unownedCollections = new HashMap<>( INIT_COLL_SIZE );
}
unownedCollections.put( key, collection );

@ -212,20 +211,20 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public BatchFetchQueue getBatchFetchQueue() {
if (batchFetchQueue==null) {
batchFetchQueue = new BatchFetchQueue(this);
if ( batchFetchQueue == null ) {
batchFetchQueue = new BatchFetchQueue( this );
}
return batchFetchQueue;
}

@Override
public void clear() {
for ( Object o : proxiesByKey.values() ) {
if ( o == null ) {
//entry may be GCd
continue;
}
((HibernateProxy) o).getHibernateLazyInitializer().unsetSession();
if ( proxiesByKey != null ) {
proxiesByKey.forEach( (k,o) -> {
if ( o != null) {
((HibernateProxy) o).getHibernateLazyInitializer().unsetSession();
}
} );
}

for ( Entry<Object, EntityEntry> objectEntityEntryEntry : entityEntryContext.reentrantSafeEntityEntries() ) {

@ -238,22 +237,22 @@ public class StatefulPersistenceContext implements PersistenceContext {
}

final SharedSessionContractImplementor session = getSession();
IdentityMap.onEachKey( collectionEntries, k -> k.unsetSession( session ) );
if ( collectionEntries != null ) {
IdentityMap.onEachKey( collectionEntries, k -> k.unsetSession( session ) );
}

arrayHolders.clear();
arrayHolders = null;
entitiesByKey.clear();
entitiesByUniqueKey.clear();
entitiesByUniqueKey = null;
entityEntryContext.clear();
// entityEntries.clear();
parentsByChild.clear();
parentsByChild = null;
entitySnapshotsByKey.clear();
collectionsByKey.clear();
collectionEntries.clear();
if ( unownedCollections != null ) {
unownedCollections.clear();
}
proxiesByKey.clear();
nullifiableEntityKeys.clear();
nonlazyCollections = null;
collectionEntries = null;
unownedCollections = null;
proxiesByKey = null;
nullifiableEntityKeys = null;
if ( batchFetchQueue != null ) {
batchFetchQueue.clear();
}

@ -262,7 +261,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( loadContexts != null ) {
loadContexts.cleanup();
}
naturalIdXrefDelegate.clear();
naturalIdXrefDelegate = null;
}

@Override

@ -295,11 +294,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override
public void afterTransactionCompletion() {
cleanUpInsertedKeysAfterTransaction();
// Downgrade locks
entityEntryContext.downgradeLocks();
// // Downgrade locks
// for ( EntityEntry o : entityEntries.values() ) {
// o.setLockMode( LockMode.NONE );
// }
}

/**
@ -406,30 +402,38 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override
public Object removeEntity(EntityKey key) {
final Object entity = entitiesByKey.remove( key );
final Iterator itr = entitiesByUniqueKey.values().iterator();
while ( itr.hasNext() ) {
if ( itr.next() == entity ) {
itr.remove();
if ( entitiesByUniqueKey != null ) {
final Iterator itr = entitiesByUniqueKey.values().iterator();
while ( itr.hasNext() ) {
if ( itr.next() == entity ) {
itr.remove();
}
}
}

// Clear all parent cache
parentsByChild.clear();
parentsByChild = null;
entitySnapshotsByKey.remove( key );
nullifiableEntityKeys.remove( key );
if ( nullifiableEntityKeys != null ) {
nullifiableEntityKeys.remove( key );
}
if( batchFetchQueue != null ) {
getBatchFetchQueue().removeBatchLoadableEntityKey(key);
getBatchFetchQueue().removeSubselect(key);
getBatchFetchQueue().removeBatchLoadableEntityKey( key );
getBatchFetchQueue().removeSubselect( key );
}
return entity;
}

@Override
public Object getEntity(EntityUniqueKey euk) {
return entitiesByUniqueKey.get( euk );
return entitiesByUniqueKey == null ? null : entitiesByUniqueKey.get( euk );
}

@Override
public void addEntity(EntityUniqueKey euk, Object entity) {
if ( entitiesByUniqueKey == null ) {
entitiesByUniqueKey = new HashMap<>( INIT_COLL_SIZE );
}
entitiesByUniqueKey.put( euk, entity );
}

@ -450,7 +454,7 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public CollectionEntry getCollectionEntry(PersistentCollection coll) {
return collectionEntries.get( coll );
return collectionEntries == null ? null : collectionEntries.get( coll );
}

@Override

@ -559,12 +563,12 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public boolean containsCollection(PersistentCollection collection) {
return collectionEntries.containsKey( collection );
return collectionEntries != null && collectionEntries.containsKey( collection );
}

@Override
public boolean containsProxy(Object entity) {
return proxiesByKey.containsValue( entity );
return proxiesByKey != null && proxiesByKey.containsValue( entity );
}

@Override

@ -616,7 +620,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
final EntityPersister persister = session.getFactory().getMetamodel().entityPersister( li.getEntityName() );
final EntityKey key = session.generateEntityKey( li.getIdentifier(), persister );
// any earlier proxy takes precedence
proxiesByKey.putIfAbsent( key, proxy );
getOrInitializeProxiesByKey().putIfAbsent( key, proxy );
proxy.getHibernateLazyInitializer().setSession( session );
}
}

@ -686,7 +690,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
// If an impl is passed, there is really no point in creating a proxy.
// It would just be extra processing. Just return the impl
if ( object != null ) {
proxiesByKey.remove( key );
removeProxyByKey( key );
return object;
}

@ -697,7 +701,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
final Object impl = originalHibernateProxy.getHibernateLazyInitializer().getImplementation();
// can we return it?
if ( concreteProxyClass.isInstance( impl ) ) {
proxiesByKey.remove( key );
removeProxyByKey( key );
return impl;
}
}

@ -722,12 +726,19 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
}

private Object removeProxyByKey(final EntityKey key) {
if ( proxiesByKey != null ) {
return proxiesByKey.remove( key );
}
return null;
}

@Override
public Object proxyFor(EntityPersister persister, EntityKey key, Object impl) throws HibernateException {
if ( !persister.hasProxy() ) {
return impl;
}
final Object proxy = proxiesByKey.get( key );
final Object proxy = getProxy( key );
return ( proxy != null ) ? narrowProxy( proxy, persister, key, impl ) : impl;
}
@ -877,7 +888,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @param key The key of the collection's entry.
*/
private void addCollection(PersistentCollection coll, CollectionEntry entry, Serializable key) {
collectionEntries.put( coll, entry );
getOrInitializeCollectionEntries().put( coll, entry );
final CollectionKey collectionKey = new CollectionKey( entry.getLoadedPersister(), key );
final PersistentCollection old = collectionsByKey.put( collectionKey, coll );
if ( old != null ) {

@ -886,12 +897,21 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
// or should it actually throw an exception?
old.unsetSession( session );
collectionEntries.remove( old );
if ( collectionEntries != null ) {
collectionEntries.remove( old );
}
// watch out for a case where old is still referenced
// somewhere in the object graph! (which is a user error)
}
}

private IdentityMap<PersistentCollection, CollectionEntry> getOrInitializeCollectionEntries() {
if ( this.collectionEntries == null ) {
this.collectionEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
}
return this.collectionEntries;
}

/**
* Add a collection to the cache, creating a new collection entry for it
*

@ -900,7 +920,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
*/
private void addCollection(PersistentCollection collection, CollectionPersister persister) {
final CollectionEntry ce = new CollectionEntry( persister, collection );
collectionEntries.put( collection, ce );
getOrInitializeCollectionEntries().put( collection, ce );
}

@Override

@ -932,22 +952,23 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public void addNonLazyCollection(PersistentCollection collection) {
if ( nonlazyCollections == null ) {
nonlazyCollections = new ArrayList<>( INIT_COLL_SIZE );
}
nonlazyCollections.add( collection );
}

@Override
public void initializeNonLazyCollections() throws HibernateException {
if ( loadCounter == 0 ) {
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Initializing non-lazy collections" );
}
LOG.trace( "Initializing non-lazy collections" );

//do this work only at the very highest level of the load
//don't let this method be called recursively
loadCounter++;
try {
int size;
while ( ( size = nonlazyCollections.size() ) > 0 ) {
while ( nonlazyCollections != null && ( size = nonlazyCollections.size() ) > 0 ) {
//note that each iteration of the loop may add new elements
nonlazyCollections.remove( size - 1 ).forceInitialization();
}

@ -961,18 +982,21 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public PersistentCollection getCollectionHolder(Object array) {
return arrayHolders.get( array );
return arrayHolders == null ? null : arrayHolders.get( array );
}

@Override
public void addCollectionHolder(PersistentCollection holder) {
//TODO:refactor + make this method private
if ( arrayHolders == null ) {
arrayHolders = new IdentityHashMap<>( INIT_COLL_SIZE );
}
arrayHolders.put( holder.getValue(), holder );
}

@Override
public PersistentCollection removeCollectionHolder(Object array) {
return arrayHolders.remove( array );
return arrayHolders != null ? arrayHolders.remove( array ) : null;
}

@Override

@ -989,7 +1013,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
else {
coll = getCollectionHolder( collection );
if ( coll == null ) {
if ( coll == null && collectionEntries != null ) {
//it might be an unwrapped collection reference!
//try to find a wrapper (slowish)
final Iterator<PersistentCollection> wrappers = collectionEntries.keyIterator();

@ -1008,12 +1032,12 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public Object getProxy(EntityKey key) {
return proxiesByKey.get( key );
return proxiesByKey == null ? null : proxiesByKey.get( key );
}

@Override
public void addProxy(EntityKey key, Object proxy) {
proxiesByKey.put( key, proxy );
getOrInitializeProxiesByKey().put( key, proxy );
}

@Override

@ -1022,11 +1046,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
batchFetchQueue.removeBatchLoadableEntityKey( key );
batchFetchQueue.removeSubselect( key );
}
return proxiesByKey.remove( key );
return removeProxyByKey( key );
}

@Override
public HashSet getNullifiableEntityKeys() {
if ( nullifiableEntityKeys == null ) {
nullifiableEntityKeys = new HashSet<>();
}
return nullifiableEntityKeys;
}

@ -1035,10 +1062,6 @@ public class StatefulPersistenceContext implements PersistenceContext {
return entitiesByKey;
}

public Map getProxiesByKey() {
return proxiesByKey;
}

@Override
public int getNumberOfManagedEntities() {
return entityEntryContext.getNumberOfManagedEntities();
@ -1049,9 +1072,29 @@ public class StatefulPersistenceContext implements PersistenceContext {
return null;
}

/**
* @deprecated We should not expose this map directly: the accessors created as a replacement
* have a better chance of skipping the initialization of this map entirely, which is a useful performance improvement.
* @return the map of managed collection entries.
*/
@Override
@Deprecated
public Map getCollectionEntries() {
return collectionEntries;
return getOrInitializeCollectionEntries();
}

@Override
public void forEachCollectionEntry(BiConsumer<PersistentCollection, CollectionEntry> action, boolean concurrent) {
if ( collectionEntries != null ) {
if ( concurrent ) {
for ( Map.Entry<PersistentCollection,CollectionEntry> entry : IdentityMap.concurrentEntries( collectionEntries ) ) {
action.accept( entry.getKey(), entry.getValue() );
}
}
else {
collectionEntries.forEach( action );
}
}
}

@Override
@ -1163,7 +1206,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
final CollectionPersister collectionPersister = session.getFactory().getMetamodel().collectionPersister( collectionRole );

// try cache lookup first
final Object parent = parentsByChild.get( childEntity );
final Object parent = getParentsByChild( childEntity );
if ( parent != null ) {
final EntityEntry entityEntry = entityEntryContext.getEntityEntry( parent );
//there may be more than one parent, filter by type

@ -1173,7 +1216,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
else {
// remove wrong entry
parentsByChild.remove( childEntity );
removeChildParent( childEntity );
}
}
@ -1264,6 +1307,13 @@ public class StatefulPersistenceContext implements PersistenceContext {
return null;
}

private Object getParentsByChild(Object childEntity) {
if ( parentsByChild != null ) {
return parentsByChild.get( childEntity );
}
return null;
}

private boolean isFoundInParent(
String property,
Object childEntity,
@ -1278,18 +1328,19 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public Object getIndexInOwner(String entity, String property, Object childEntity, Map mergeMap) {
final EntityPersister persister = session.getFactory().getMetamodel().entityPersister( entity );
final CollectionPersister cp = session.getFactory().getMetamodel().collectionPersister( entity + '.' + property );
final MetamodelImplementor metamodel = session.getFactory().getMetamodel();
final EntityPersister persister = metamodel.entityPersister( entity );
final CollectionPersister cp = metamodel.collectionPersister( entity + '.' + property );

// try cache lookup first
final Object parent = parentsByChild.get( childEntity );
final Object parent = getParentsByChild( childEntity );
if ( parent != null ) {
final EntityEntry entityEntry = entityEntryContext.getEntityEntry( parent );
//there may be more than one parent, filter by type
if ( persister.isSubclassEntityName( entityEntry.getEntityName() ) ) {
Object index = getIndexInParent( property, childEntity, persister, cp, parent );

if (index==null && mergeMap!=null) {
if ( index == null && mergeMap != null ) {
final Object unMergedInstance = mergeMap.get( parent );
final Object unMergedChild = mergeMap.get( childEntity );
if ( unMergedInstance != null && unMergedChild != null ) {

@ -1306,7 +1357,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
else {
// remove wrong entry
parentsByChild.remove( childEntity );
removeChildParent( childEntity );
}
}
@ -1354,16 +1405,19 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public void addNullProperty(EntityKey ownerKey, String propertyName) {
if ( nullAssociations == null ) {
nullAssociations = new HashSet<>( INIT_COLL_SIZE );
}
nullAssociations.add( new AssociationKey( ownerKey, propertyName ) );
}

@Override
public boolean isPropertyNull(EntityKey ownerKey, String propertyName) {
return nullAssociations.contains( new AssociationKey( ownerKey, propertyName ) );
return nullAssociations != null && nullAssociations.contains( new AssociationKey( ownerKey, propertyName ) );
}

private void clearNullProperties() {
nullAssociations.clear();
nullAssociations = null;
}

@Override

@ -1378,7 +1432,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
else {
final EntityEntry ee = getEntry( entityOrProxy );
if ( ee == null ) {
throw new TransientObjectException("Instance was not associated with this persistence context" );
throw new TransientObjectException( "Instance was not associated with this persistence context" );
}
isReadOnly = ee.isReadOnly();
}

@ -1415,11 +1469,12 @@ public class StatefulPersistenceContext implements PersistenceContext {
}

private void setProxyReadOnly(HibernateProxy proxy, boolean readOnly) {
if ( proxy.getHibernateLazyInitializer().getSession() != getSession() ) {
final LazyInitializer hibernateLazyInitializer = proxy.getHibernateLazyInitializer();
if ( hibernateLazyInitializer.getSession() != getSession() ) {
throw new AssertionFailure(
"Attempt to set a proxy to read-only that is associated with a different session" );
}
proxy.getHibernateLazyInitializer().setReadOnly( readOnly );
hibernateLazyInitializer.setReadOnly( readOnly );
}

private void setEntityReadOnly(Object entity, boolean readOnly) {

@ -1435,7 +1490,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
public void replaceDelayedEntityIdentityInsertKeys(EntityKey oldKey, Serializable generatedId) {
final Object entity = entitiesByKey.remove( oldKey );
final EntityEntry oldEntry = entityEntryContext.removeEntityEntry( entity );
parentsByChild.clear();
this.parentsByChild = null;

final EntityKey newKey = session.generateEntityKey( generatedId, oldEntry.getPersister() );
addEntity( newKey, entity );

@ -1461,9 +1516,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @throws IOException serialization errors.
*/
public void serialize(ObjectOutputStream oos) throws IOException {
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Serializing persistence-context" );
}
LOG.trace( "Serializing persistence-context" );

oos.writeBoolean( defaultReadOnly );
oos.writeBoolean( hasNonReadOnlyEntities );

@ -1477,22 +1530,32 @@ public class StatefulPersistenceContext implements PersistenceContext {
oos.writeObject( entry.getValue() );
}

oos.writeInt( entitiesByUniqueKey.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + entitiesByUniqueKey.size() + "] entitiesByUniqueKey entries" );
if ( entitiesByUniqueKey == null ) {
oos.writeInt( 0 );
}
for ( Map.Entry<EntityUniqueKey,Object> entry : entitiesByUniqueKey.entrySet() ) {
entry.getKey().serialize( oos );
oos.writeObject( entry.getValue() );
else {
oos.writeInt( entitiesByUniqueKey.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + entitiesByUniqueKey.size() + "] entitiesByUniqueKey entries" );
}
for ( Map.Entry<EntityUniqueKey,Object> entry : entitiesByUniqueKey.entrySet() ) {
entry.getKey().serialize( oos );
oos.writeObject( entry.getValue() );
}
}

oos.writeInt( proxiesByKey.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + proxiesByKey.size() + "] proxiesByKey entries" );
if ( proxiesByKey == null ) {
oos.writeInt( 0 );
}
for ( Map.Entry<EntityKey,Object> entry : proxiesByKey.entrySet() ) {
entry.getKey().serialize( oos );
oos.writeObject( entry.getValue() );
else {
oos.writeInt( proxiesByKey.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + proxiesByKey.size() + "] proxiesByKey entries" );
}
for ( Map.Entry<EntityKey,Object> entry : proxiesByKey.entrySet() ) {
entry.getKey().serialize( oos );
oos.writeObject( entry.getValue() );
}
}

oos.writeInt( entitySnapshotsByKey.size() );
@ -1515,30 +1578,46 @@ public class StatefulPersistenceContext implements PersistenceContext {
oos.writeObject( entry.getValue() );
}

oos.writeInt( collectionEntries.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + collectionEntries.size() + "] collectionEntries entries" );
if ( collectionEntries == null ) {
oos.writeInt( 0 );
}
for ( Map.Entry<PersistentCollection,CollectionEntry> entry : collectionEntries.entrySet() ) {
oos.writeObject( entry.getKey() );
entry.getValue().serialize( oos );
else {
oos.writeInt( collectionEntries.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + collectionEntries.size() + "] collectionEntries entries" );
}
for ( Map.Entry<PersistentCollection,CollectionEntry> entry : collectionEntries.entrySet() ) {
oos.writeObject( entry.getKey() );
entry.getValue().serialize( oos );
}
}

oos.writeInt( arrayHolders.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + arrayHolders.size() + "] arrayHolders entries" );
if ( arrayHolders == null ) {
oos.writeInt( 0 );
}
for ( Map.Entry<Object,PersistentCollection> entry : arrayHolders.entrySet() ) {
oos.writeObject( entry.getKey() );
oos.writeObject( entry.getValue() );
else {
oos.writeInt( arrayHolders.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + arrayHolders.size() + "] arrayHolders entries" );
}
for ( Map.Entry<Object,PersistentCollection> entry : arrayHolders.entrySet() ) {
oos.writeObject( entry.getKey() );
oos.writeObject( entry.getValue() );
}
}

oos.writeInt( nullifiableEntityKeys.size() );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + nullifiableEntityKeys.size() + "] nullifiableEntityKey entries" );
if ( nullifiableEntityKeys == null ) {
oos.writeInt( 0 );
}
for ( EntityKey entry : nullifiableEntityKeys ) {
entry.serialize( oos );
else {
final int size = nullifiableEntityKeys.size();
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting serialization of [" + size + "] nullifiableEntityKey entries" );
}
oos.writeInt( size );
for ( EntityKey entry : nullifiableEntityKeys ) {
entry.serialize( oos );
}
}
}
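With the maps now nullable, `serialize` writes an explicit zero count for any map that was never initialized, so the stream layout is unchanged and absent collections cost nothing. A minimal sketch of the symmetric write/read convention (types simplified to `String` for illustration):

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;

// Illustration: a null map serializes as count 0; reading count 0 keeps the
// field un-allocated, mirroring the lazy initialization above.
final class NullTolerantMapIO {
	static void write(ObjectOutputStream oos, Map<String, String> map) throws IOException {
		if ( map == null ) {
			oos.writeInt( 0 );
			return;
		}
		oos.writeInt( map.size() );
		for ( Map.Entry<String, String> entry : map.entrySet() ) {
			oos.writeObject( entry.getKey() );
			oos.writeObject( entry.getValue() );
		}
	}

	static Map<String, String> read(ObjectInputStream ois) throws IOException, ClassNotFoundException {
		final int count = ois.readInt();
		if ( count == 0 ) {
			return null; // stay un-allocated
		}
		final Map<String, String> map = new HashMap<>( count );
		for ( int i = 0; i < count; i++ ) {
			map.put( (String) ois.readObject(), (String) ois.readObject() );
		}
		return map;
	}
}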
@ -1556,9 +1635,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
public static StatefulPersistenceContext deserialize(
ObjectInputStream ois,
SessionImplementor session) throws IOException, ClassNotFoundException {
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Deserializing persistence-context" );
}
LOG.trace( "Deserializing persistence-context" );
final StatefulPersistenceContext rtn = new StatefulPersistenceContext( session );
SessionFactoryImplementor sfi = session.getFactory();

@ -1585,30 +1662,23 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting deserialization of [" + count + "] entitiesByUniqueKey entries" );
}
rtn.entitiesByUniqueKey = new HashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitiesByUniqueKey.put( EntityUniqueKey.deserialize( ois, session ), ois.readObject() );
if ( count != 0 ) {
rtn.entitiesByUniqueKey = new HashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitiesByUniqueKey.put( EntityUniqueKey.deserialize( ois, session ), ois.readObject() );
}
}

count = ois.readInt();
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting deserialization of [" + count + "] proxiesByKey entries" );
}
//noinspection unchecked
rtn.proxiesByKey = new ConcurrentReferenceHashMap<>(
count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count,
.75f,
1,
ConcurrentReferenceHashMap.ReferenceType.STRONG,
ConcurrentReferenceHashMap.ReferenceType.WEAK,
null
);
for ( int i = 0; i < count; i++ ) {
final EntityKey ek = EntityKey.deserialize( ois, sfi );
final Object proxy = ois.readObject();
if ( proxy instanceof HibernateProxy ) {
( (HibernateProxy) proxy ).getHibernateLazyInitializer().setSession( session );
rtn.proxiesByKey.put( ek, proxy );
rtn.getOrInitializeProxiesByKey().put( ek, proxy );
}
else {
// otherwise, the proxy was pruned during the serialization process

@ -1642,21 +1712,22 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting deserialization of [" + count + "] collectionEntries entries" );
}
rtn.collectionEntries = IdentityMap.instantiateSequenced( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
final PersistentCollection pc = (PersistentCollection) ois.readObject();
final CollectionEntry ce = CollectionEntry.deserialize( ois, session );
pc.setCurrentSession( session );
rtn.collectionEntries.put( pc, ce );
rtn.getOrInitializeCollectionEntries().put( pc, ce );
}

count = ois.readInt();
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting deserialization of [" + count + "] arrayHolders entries" );
}
rtn.arrayHolders = new IdentityHashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.arrayHolders.put( ois.readObject(), (PersistentCollection) ois.readObject() );
if ( count != 0 ) {
rtn.arrayHolders = new IdentityHashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.arrayHolders.put( ois.readObject(), (PersistentCollection) ois.readObject() );
}
}

count = ois.readInt();

@ -1678,15 +1749,19 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public void addChildParent(Object child, Object parent) {
if ( parentsByChild == null ) {
parentsByChild = new IdentityHashMap<>( INIT_COLL_SIZE );
}
parentsByChild.put( child, parent );
}

@Override
public void removeChildParent(Object child) {
parentsByChild.remove( child );
if ( parentsByChild != null ) {
parentsByChild.remove( child );
}
}


// INSERTED KEYS HANDLING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

private HashMap<String,List<Serializable>> insertedKeysMap;

@ -1722,17 +1797,61 @@ public class StatefulPersistenceContext implements PersistenceContext {
return false;
}

@Override
public boolean containsNullifiableEntityKey(Supplier<EntityKey> sek) {
if ( nullifiableEntityKeys == null || nullifiableEntityKeys.size() == 0 ) {
return false;
}
else {
final EntityKey entityKey = sek.get();
return nullifiableEntityKeys.contains( entityKey );
}
}

@Override
public void registerNullifiableEntityKey(EntityKey key) {
if ( nullifiableEntityKeys == null ) {
nullifiableEntityKeys = new HashSet<>();
}
this.nullifiableEntityKeys.add( key );
}

@Override
public boolean isNullifiableEntityKeysEmpty() {
return ( nullifiableEntityKeys == null || nullifiableEntityKeys.size() == 0 );
}

@Override
public int getCollectionEntriesSize() {
return collectionEntries == null ? 0 : collectionEntries.size();
}

@Override
public CollectionEntry removeCollectionEntry(PersistentCollection collection) {
if ( collectionEntries == null ) {
return null;
}
else {
return collectionEntries.remove( collection );
}
}

private void cleanUpInsertedKeysAfterTransaction() {
if ( insertedKeysMap != null ) {
insertedKeysMap.clear();
}
}


// NATURAL ID RESOLUTION HANDLING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

private final NaturalIdXrefDelegate naturalIdXrefDelegate = new NaturalIdXrefDelegate( this );
private NaturalIdXrefDelegate naturalIdXrefDelegate;

private NaturalIdXrefDelegate getNaturalIdXrefDelegate() {
if ( naturalIdXrefDelegate == null ) {
this.naturalIdXrefDelegate = new NaturalIdXrefDelegate( this );
}
return naturalIdXrefDelegate;
}

private final NaturalIdHelper naturalIdHelper = new NaturalIdHelper() {
@Override

@ -1751,7 +1870,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
// from a single load event. The first put journal would come from the natural id resolution;
// the second comes from the entity loading. In this condition, we want to avoid the multiple
// 'put' stats incrementing.
final boolean justAddedLocally = naturalIdXrefDelegate.cacheNaturalIdCrossReference( persister, id, naturalIdValues );
final boolean justAddedLocally = getNaturalIdXrefDelegate().cacheNaturalIdCrossReference( persister, id, naturalIdValues );

if ( justAddedLocally && persister.hasNaturalIdCache() ) {
managedSharedCacheEntries( persister, id, naturalIdValues, null, CachedNaturalIdValueSource.LOAD );

@ -1774,7 +1893,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
final Object[] naturalIdValues = extractNaturalIdValues( state, persister );

// cache
naturalIdXrefDelegate.cacheNaturalIdCrossReference( persister, id, naturalIdValues );
getNaturalIdXrefDelegate().cacheNaturalIdCrossReference( persister, id, naturalIdValues );
}

@Override

@ -1848,7 +1967,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
new AfterTransactionCompletionProcess() {
@Override
public void doAfterTransactionCompletion(boolean success, SharedSessionContractImplementor session) {
if (success) {
if ( success ) {
final boolean put = naturalIdCacheAccessStrategy.afterInsert( session, naturalIdCacheKey, id );
if ( put && statistics.isStatisticsEnabled() ) {
statistics.naturalIdCachePut(

@ -1914,7 +2033,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
break;
}
default: {
LOG.debug( "Unexpected CachedNaturalIdValueSource [" + source + "]" );
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Unexpected CachedNaturalIdValueSource [" + source + "]" );
}
}
}
}

@ -1929,7 +2050,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
persister = locateProperPersister( persister );
final Object[] naturalIdValues = getNaturalIdValues( state, persister );

final Object[] localNaturalIdValues = naturalIdXrefDelegate.removeNaturalIdCrossReference(
final Object[] localNaturalIdValues = getNaturalIdXrefDelegate().removeNaturalIdCrossReference(
persister,
id,
naturalIdValues

@ -1968,12 +2089,12 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public Object[] findCachedNaturalId(EntityPersister persister, Serializable pk) {
return naturalIdXrefDelegate.findCachedNaturalId( locateProperPersister( persister ), pk );
return getNaturalIdXrefDelegate().findCachedNaturalId( locateProperPersister( persister ), pk );
}

@Override
public Serializable findCachedNaturalIdResolution(EntityPersister persister, Object[] naturalIdValues) {
return naturalIdXrefDelegate.findCachedNaturalIdResolution( locateProperPersister( persister ), naturalIdValues );
return getNaturalIdXrefDelegate().findCachedNaturalIdResolution( locateProperPersister( persister ), naturalIdValues );
}

@Override

@ -2011,7 +2132,7 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public Collection<Serializable> getCachedPkResolutions(EntityPersister entityPersister) {
return naturalIdXrefDelegate.getCachedPkResolutions( entityPersister );
return getNaturalIdXrefDelegate().getCachedPkResolutions( entityPersister );
}

@Override

@ -2024,6 +2145,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
persister = locateProperPersister( persister );

final Object[] naturalIdValuesFromCurrentObjectState = extractNaturalIdValues( entity, persister );
final NaturalIdXrefDelegate naturalIdXrefDelegate = getNaturalIdXrefDelegate();
final boolean changed = ! naturalIdXrefDelegate.sameAsCached(
persister,
pk,

@ -2045,12 +2167,12 @@ public class StatefulPersistenceContext implements PersistenceContext {

@Override
public void cleanupFromSynchronizations() {
naturalIdXrefDelegate.unStashInvalidNaturalIdReferences();
getNaturalIdXrefDelegate().unStashInvalidNaturalIdReferences();
}

@Override
public void handleEviction(Object object, EntityPersister persister, Serializable identifier) {
naturalIdXrefDelegate.removeNaturalIdCrossReference(
getNaturalIdXrefDelegate().removeNaturalIdCrossReference(
persister,
identifier,
findCachedNaturalId( persister, identifier )
@ -82,7 +82,8 @@ public class JdbcCoordinatorImpl implements JdbcCoordinator {
*/
public JdbcCoordinatorImpl(
Connection userSuppliedConnection,
JdbcSessionOwner owner) {
JdbcSessionOwner owner,
JdbcServices jdbcServices) {
this.isUserSuppliedConnection = userSuppliedConnection != null;

final ResourceRegistry resourceRegistry = new ResourceRegistryStandardImpl(

@ -95,13 +96,12 @@ public class JdbcCoordinatorImpl implements JdbcCoordinator {
this.logicalConnection = new LogicalConnectionManagedImpl(
owner.getJdbcConnectionAccess(),
owner.getJdbcSessionContext(),
resourceRegistry
resourceRegistry,
jdbcServices
);
}
this.owner = owner;
this.jdbcServices = owner.getJdbcSessionContext()
.getServiceRegistry()
.getService( JdbcServices.class );
this.jdbcServices = jdbcServices;
}

private JdbcCoordinatorImpl(

@ -223,7 +223,7 @@ public class JdbcCoordinatorImpl implements JdbcCoordinator {
@Override
public ResultSetReturn getResultSetReturn() {
if ( resultSetExtractor == null ) {
resultSetExtractor = new ResultSetReturnImpl( this );
resultSetExtractor = new ResultSetReturnImpl( this, jdbcServices );
}
return resultSetExtractor;
}
@ -36,16 +36,9 @@ public class ResultSetReturnImpl implements ResultSetReturn {
*
* @param jdbcCoordinator The JdbcCoordinator
*/
public ResultSetReturnImpl(JdbcCoordinator jdbcCoordinator) {
public ResultSetReturnImpl(JdbcCoordinator jdbcCoordinator, JdbcServices jdbcServices) {
this.jdbcCoordinator = jdbcCoordinator;

final JdbcServices jdbcServices = jdbcCoordinator.getJdbcSessionOwner()
.getJdbcSessionContext()
.getServiceRegistry()
.getService( JdbcServices.class );

this.dialect = jdbcServices.getDialect();

this.sqlStatementLogger = jdbcServices.getSqlStatementLogger();
this.sqlExceptionHelper = jdbcServices.getSqlExceptionHelper();
}
@ -163,8 +163,8 @@ public class HQLQueryPlan implements Serializable {
*/
public String[] getSqlStrings() {
List<String> sqlStrings = new ArrayList<>();
for ( int i = 0; i < translators.length; i++ ) {
sqlStrings.addAll( translators[i].collectSqlStrings() );
for ( QueryTranslator translator : translators ) {
sqlStrings.addAll( translator.collectSqlStrings() );
}
return ArrayHelper.toStringArray( sqlStrings );
}
@ -229,13 +229,13 @@ public class ActionQueue {
}

public void clear() {
for ( ListProvider listProvider : EXECUTABLE_LISTS_MAP.values() ) {
EXECUTABLE_LISTS_MAP.forEach( (k,listProvider) -> {
ExecutableList<?> l = listProvider.get( this );
if( l != null ) {
if ( l != null ) {
l.clear();
}
}
if( unresolvedInsertions != null ) {
} );
if ( unresolvedInsertions != null ) {
unresolvedInsertions.clear();
}
}
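`clear()` switches from iterating `EXECUTABLE_LISTS_MAP.values()` to `Map.forEach`, visiting entries through a lambda rather than through a values-view iterator. The shape of the change in isolation, with the provider type reduced to a plain `Function` for illustration:

import java.util.List;
import java.util.Map;
import java.util.function.Function;

// Illustration only: forEach hands each (key, provider) pair to the lambda
// directly, so no Iterator over the values view is needed on this hot path.
final class ForEachSketch {
	static void clearAll(Map<Class<?>, Function<Object, List<?>>> providers, Object owner) {
		providers.forEach( (k, provider) -> {
			final List<?> l = provider.apply( owner );
			if ( l != null ) {
				l.clear();
			}
		} );
	}
}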
@ -267,7 +267,7 @@ public class ActionQueue {
LOG.tracev( "Adding insert with non-nullable, transient entities; insert=[{0}], dependencies=[{1}]", insert,
nonNullableTransientDependencies.toLoggableString( insert.getSession() ) );
}
if( unresolvedInsertions == null ) {
if ( unresolvedInsertions == null ) {
unresolvedInsertions = new UnresolvedEntityInsertActions();
}
unresolvedInsertions.addUnresolvedEntityInsertAction( insert, nonNullableTransientDependencies );

@ -288,7 +288,7 @@ public class ActionQueue {
if ( !insert.isVeto() ) {
insert.makeEntityManaged();

if( unresolvedInsertions != null ) {
if ( unresolvedInsertions != null ) {
for ( AbstractEntityInsertAction resolvedAction : unresolvedInsertions.resolveDependentActions( insert.getInstance(), session ) ) {
addResolvedEntityInsertAction( resolvedAction );
}

@ -390,8 +390,8 @@ public class ActionQueue {
}

private void registerCleanupActions(Executable executable) {
if( executable.getBeforeTransactionCompletionProcess() != null ) {
if( beforeTransactionProcesses == null ) {
if ( executable.getBeforeTransactionCompletionProcess() != null ) {
if ( beforeTransactionProcesses == null ) {
beforeTransactionProcesses = new BeforeTransactionCompletionProcessQueue( session );
}
beforeTransactionProcesses.register( executable.getBeforeTransactionCompletionProcess() );

@ -399,8 +399,8 @@ public class ActionQueue {
if ( session.getFactory().getSessionFactoryOptions().isQueryCacheEnabled() ) {
invalidateSpaces( convertTimestampSpaces( executable.getPropertySpaces() ) );
}
if( executable.getAfterTransactionCompletionProcess() != null ) {
if( afterTransactionProcesses == null ) {
if ( executable.getAfterTransactionCompletionProcess() != null ) {
if ( afterTransactionProcesses == null ) {
afterTransactionProcesses = new AfterTransactionCompletionProcessQueue( session );
}
afterTransactionProcesses.register( executable.getAfterTransactionCompletionProcess() );

@ -432,20 +432,20 @@ public class ActionQueue {
* the first unresolved entity insert action.
*/
public void checkNoUnresolvedActionsAfterOperation() throws PropertyValueException {
if(unresolvedInsertions != null) {
if ( unresolvedInsertions != null ) {
unresolvedInsertions.checkNoUnresolvedActionsAfterOperation();
}
}

public void registerProcess(AfterTransactionCompletionProcess process) {
if( afterTransactionProcesses == null ) {
if ( afterTransactionProcesses == null ) {
afterTransactionProcesses = new AfterTransactionCompletionProcessQueue( session );
}
afterTransactionProcesses.register( process );
}

public void registerProcess(BeforeTransactionCompletionProcess process) {
if( beforeTransactionProcesses == null ) {
if ( beforeTransactionProcesses == null ) {
beforeTransactionProcesses = new BeforeTransactionCompletionProcessQueue( session );
}
beforeTransactionProcesses.register( process );

@ -472,12 +472,12 @@ public class ActionQueue {
throw new IllegalStateException( "About to execute actions, but there are unresolved entity insert actions." );
}

for ( ListProvider listProvider : EXECUTABLE_LISTS_MAP.values() ) {
EXECUTABLE_LISTS_MAP.forEach( (k,listProvider) -> {
ExecutableList<?> l = listProvider.get( this );
if ( l != null && !l.isEmpty() ) {
executeActions( l );
}
}
} );
}

/**

@ -493,7 +493,7 @@ public class ActionQueue {
}

private void prepareActions(ExecutableList<?> queue) throws HibernateException {
if( queue == null ) {
if ( queue == null ) {
return;
}
for ( Executable executable : queue ) {

@ -509,7 +509,7 @@ public class ActionQueue {
public void afterTransactionCompletion(boolean success) {
if ( !isTransactionCoordinatorShared ) {
// Execute completion actions only in transaction owner (aka parent session).
if( afterTransactionProcesses != null ) {
if ( afterTransactionProcesses != null ) {
afterTransactionProcesses.afterTransactionCompletion( success );
}
}

@ -521,7 +521,7 @@ public class ActionQueue {
public void beforeTransactionCompletion() {
if ( !isTransactionCoordinatorShared ) {
// Execute completion actions only in transaction owner (aka parent session).
if( beforeTransactionProcesses != null ) {
if ( beforeTransactionProcesses != null ) {
beforeTransactionProcesses.beforeTransactionCompletion();
}
}

@ -553,7 +553,7 @@ public class ActionQueue {
return true;
}
}
if(unresolvedInsertions == null) {
if ( unresolvedInsertions == null ) {
return false;
}
return areTablesToBeUpdated( unresolvedInsertions, tables );

@ -604,14 +604,14 @@ public class ActionQueue {
e.execute();
}
finally {
if( e.getBeforeTransactionCompletionProcess() != null ) {
if( beforeTransactionProcesses == null ) {
if ( e.getBeforeTransactionCompletionProcess() != null ) {
if ( beforeTransactionProcesses == null ) {
beforeTransactionProcesses = new BeforeTransactionCompletionProcessQueue( session );
}
beforeTransactionProcesses.register( e.getBeforeTransactionCompletionProcess() );
}
if( e.getAfterTransactionCompletionProcess() != null ) {
if( afterTransactionProcesses == null ) {
if ( e.getAfterTransactionCompletionProcess() != null ) {
if ( afterTransactionProcesses == null ) {
afterTransactionProcesses = new AfterTransactionCompletionProcessQueue( session );
}
afterTransactionProcesses.register( e.getAfterTransactionCompletionProcess() );

@ -657,7 +657,7 @@ public class ActionQueue {
private void invalidateSpaces(String... spaces) {
if ( spaces != null && spaces.length > 0 ) {
for ( Serializable s : spaces ) {
if( afterTransactionProcesses == null ) {
if ( afterTransactionProcesses == null ) {
afterTransactionProcesses = new AfterTransactionCompletionProcessQueue( session );
}
afterTransactionProcesses.addSpaceToInvalidate( (String) s );

@ -691,21 +691,21 @@ public class ActionQueue {
}

public int numberOfCollectionRemovals() {
if( collectionRemovals == null ) {
if ( collectionRemovals == null ) {
return 0;
}
return collectionRemovals.size();
}

public int numberOfCollectionUpdates() {
if( collectionUpdates == null ) {
if ( collectionUpdates == null ) {
return 0;
}
return collectionUpdates.size();
}

public int numberOfCollectionCreations() {
if( collectionCreations == null ) {
if ( collectionCreations == null ) {
return 0;
}
return collectionCreations.size();

@ -718,24 +718,24 @@ public class ActionQueue {
}

public int numberOfUpdates() {
if( updates == null ) {
if ( updates == null ) {
return 0;
}
return updates.size();
}

public int numberOfInsertions() {
if( insertions == null ) {
if ( insertions == null ) {
return 0;
}
return insertions.size();
}

public TransactionCompletionProcesses getTransactionCompletionProcesses() {
if( beforeTransactionProcesses == null ) {
if ( beforeTransactionProcesses == null ) {
beforeTransactionProcesses = new BeforeTransactionCompletionProcessQueue( session );
}
if( afterTransactionProcesses == null ) {
if ( afterTransactionProcesses == null ) {
afterTransactionProcesses = new AfterTransactionCompletionProcessQueue( session );
}
return new TransactionCompletionProcesses( beforeTransactionProcesses, afterTransactionProcesses );

@ -758,16 +758,16 @@ public class ActionQueue {
public void sortCollectionActions() {
if ( isOrderUpdatesEnabled() ) {
// sort the updates by fk
if( collectionCreations != null ) {
if ( collectionCreations != null ) {
collectionCreations.sort();
}
if( collectionUpdates != null ) {
if ( collectionUpdates != null ) {
collectionUpdates.sort();
}
if( collectionQueuedOps != null ) {
if ( collectionQueuedOps != null ) {
collectionQueuedOps.sort();
}
if( collectionRemovals != null ) {
if ( collectionRemovals != null ) {
collectionRemovals.sort();
}
}

@ -792,16 +792,16 @@ public class ActionQueue {
}

public void clearFromFlushNeededCheck(int previousCollectionRemovalSize) {
if( collectionCreations != null ) {
if ( collectionCreations != null ) {
collectionCreations.clear();
}
if( collectionUpdates != null ) {
if ( collectionUpdates != null ) {
collectionUpdates.clear();
}
if( collectionQueuedOps != null ) {
if ( collectionQueuedOps != null ) {
collectionQueuedOps.clear();
}
if( updates != null) {
if ( updates != null) {
updates.clear();
}
// collection deletions are a special case since update() can add

@ -835,19 +835,19 @@ public class ActionQueue {
rescuedEntity = initializer.getImplementation( session );
}
}
if( deletions != null ) {
if ( deletions != null ) {
for ( int i = 0; i < deletions.size(); i++ ) {
EntityDeleteAction action = deletions.get( i );
if (action.getInstance() == rescuedEntity) {
if ( action.getInstance() == rescuedEntity ) {
deletions.remove( i );
return;
}
}
}
if( orphanRemovals != null ) {
if ( orphanRemovals != null ) {
for ( int i = 0; i < orphanRemovals.size(); i++ ) {
EntityDeleteAction action = orphanRemovals.get( i );
if (action.getInstance() == rescuedEntity) {
if ( action.getInstance() == rescuedEntity ) {
orphanRemovals.remove( i );
return;
}

@ -864,14 +864,14 @@ public class ActionQueue {
*/
public void serialize(ObjectOutputStream oos) throws IOException {
LOG.trace( "Serializing action-queue" );
if( unresolvedInsertions == null ) {
if ( unresolvedInsertions == null ) {
unresolvedInsertions = new UnresolvedEntityInsertActions();
}
unresolvedInsertions.serialize( oos );

for ( ListProvider p : EXECUTABLE_LISTS_MAP.values() ) {
ExecutableList<?> l = p.get( this );
if( l == null ) {
if ( l == null ) {
oos.writeBoolean( false );
}
else {

@ -902,8 +902,8 @@ public class ActionQueue {
for ( ListProvider provider : EXECUTABLE_LISTS_MAP.values() ) {
ExecutableList<?> l = provider.get( rtn );
boolean notNull = ois.readBoolean();
if( notNull ) {
if(l == null) {
if ( notNull ) {
if ( l == null ) {
l = provider.init( rtn );
}
l.readExternal( ois );

@ -1218,7 +1218,7 @@ public class ActionQueue {
BatchIdentifier nextBatchIdentifier = latestBatches.get( j );

if ( batchIdentifier.hasParent( nextBatchIdentifier ) ) {
if( nextBatchIdentifier.hasParent( batchIdentifier ) ) {
if ( nextBatchIdentifier.hasParent( batchIdentifier ) ) {
//cycle detected, no need to continue
break sort;
}

@ -1232,7 +1232,7 @@ public class ActionQueue {
}
sorted = true;
}
while ( !sorted && iterations <= maxIterations);
while ( !sorted && iterations <= maxIterations );

if ( iterations > maxIterations ) {
LOG.warn( "The batch containing " + latestBatches.size() + " statements could not be sorted after " + maxIterations + " iterations. " +
@@ -236,8 +236,8 @@ public class NamedSQLQueryDefinition extends NamedQueryDefinition {
allQueryReturns[i] = this.queryReturns[i];
}

-for ( int j = 0; j < queryReturnsToAdd.length; j++ ) {
-allQueryReturns[i] = queryReturnsToAdd[j];
+for ( NativeSQLQueryReturn queryReturnsToAdd1 : queryReturnsToAdd ) {
+allQueryReturns[i] = queryReturnsToAdd1;
i++;
}
@@ -10,6 +10,8 @@ import java.io.Serializable;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.function.Supplier;

import org.hibernate.HibernateException;
import org.hibernate.LockMode;

@@ -477,7 +479,9 @@ public interface PersistenceContext {

/**
* Retrieve the set of EntityKeys representing nullifiable references
+* @deprecated Use {@link #containsNullifiableEntityKey(Supplier)} or {@link #registerNullifiableEntityKey(EntityKey)} or {@link #isNullifiableEntityKeysEmpty()}
*/
+@Deprecated
HashSet getNullifiableEntityKeys();

/**

@@ -507,9 +511,18 @@ public interface PersistenceContext {

/**
* Get the mapping from collection instance to collection entry
+* @deprecated use {@link #removeCollectionEntry(PersistentCollection)} or {@link #getCollectionEntriesSize()}, {@link #forEachCollectionEntry(BiConsumer,boolean)}.
*/
+@Deprecated
Map getCollectionEntries();

+/**
+* Execute some action on each entry of the collectionEntries map, optionally iterating on a defensive copy.
+* @param action the lambda to apply on each PersistentCollection,CollectionEntry map entry of the PersistenceContext.
+* @param concurrent set this to false for improved efficiency, but that would make it illegal to make changes to the underlying collectionEntries map.
+*/
+void forEachCollectionEntry(BiConsumer<PersistentCollection,CollectionEntry> action, boolean concurrent);

/**
* Get the mapping from collection key to collection instance
*/

@@ -720,6 +733,42 @@ public interface PersistenceContext {
*/
boolean wasInsertedDuringTransaction(EntityPersister persister, Serializable id);

+/**
+* Checks if a certain {@link EntityKey} was registered as nullifiable on this {@link PersistenceContext}.
+*
+* @param sek a supplier for the EntityKey; this allows to not always needing to create the key;
+* for example is the map is known to be empty there is no need to create one to check.
+* @return true if the EntityKey had been registered before using {@link #registerNullifiableEntityKey(EntityKey)}
+* @see #registerNullifiableEntityKey(EntityKey)
+*/
+boolean containsNullifiableEntityKey(Supplier<EntityKey> sek);
+
+/**
+* Registers an {@link EntityKey} as nullifiable on this {@link PersistenceContext}.
+* @param key
+*/
+void registerNullifiableEntityKey(EntityKey key);
+
+/**
+* @return true if no {@link EntityKey} was registered as nullifiable on this {@link PersistenceContext}.
+* @see #registerNullifiableEntityKey(EntityKey)
+*/
+boolean isNullifiableEntityKeysEmpty();
+
+/**
+* The size of the internal map storing all collection entries.
+* (The map is not exposed directly, but the size is often useful)
+* @return the size
+*/
+int getCollectionEntriesSize();
+
+/**
+* Remove a {@link PersistentCollection} from the {@link PersistenceContext}.
+* @param collection the collection to remove
+* @return the matching {@link CollectionEntry}, if any was removed.
+*/
+CollectionEntry removeCollectionEntry(PersistentCollection collection);
+
/**
* Provides centralized access to natural-id-related functionality.
*/
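Reviewer note: the Supplier indirection above lets callers avoid building an EntityKey at all when the nullifiable set is empty. A minimal sketch of how calling code migrates off the deprecated getNullifiableEntityKeys() — `id`, `persister` and `mustNullify` are hypothetical stand-ins for whatever the caller already has in scope:

// Before (deprecated): always allocates the key and exposes the raw HashSet.
// boolean contained = persistenceContext.getNullifiableEntityKeys()
//         .contains( session.generateEntityKey( id, persister ) );

// After: the key is only created if the internal set is non-empty.
boolean contained = persistenceContext.containsNullifiableEntityKey(
        () -> session.generateEntityKey( id, persister )
);
if ( !contained && mustNullify ) {
    persistenceContext.registerNullifiableEntityKey( session.generateEntityKey( id, persister ) );
}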
@@ -40,12 +40,12 @@ public class JtaPlatformInitiator implements StandardServiceInitiator<JtaPlatfor
JtaPlatform platform = registry.getService( StrategySelector.class ).resolveStrategy( JtaPlatform.class, setting );

if ( platform == null ) {
-LOG.debugf( "No JtaPlatform was specified, checking resolver" );
+LOG.debug( "No JtaPlatform was specified, checking resolver" );
platform = registry.getService( JtaPlatformResolver.class ).resolveJtaPlatform( configurationValues, registry );
}

if ( platform == null ) {
-LOG.debugf( "No JtaPlatform was specified, checking resolver" );
+LOG.debug( "No JtaPlatform was specified, checking resolver" );
platform = getFallbackProvider( configurationValues, registry );
}
@@ -11,11 +11,11 @@ import java.util.IdentityHashMap;
import java.util.Map;

import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
import org.hibernate.action.internal.CollectionRecreateAction;
import org.hibernate.action.internal.CollectionRemoveAction;
import org.hibernate.action.internal.CollectionUpdateAction;
import org.hibernate.action.internal.QueuedOperationCollectionAction;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.internal.Cascade;
import org.hibernate.engine.internal.CascadePoint;
import org.hibernate.engine.internal.Collections;

@@ -23,7 +23,6 @@ import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.ActionQueue;
import org.hibernate.engine.spi.CascadingAction;
import org.hibernate.engine.spi.CascadingActions;
import org.hibernate.engine.spi.CollectionEntry;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.PersistenceContext;

@@ -38,7 +37,6 @@ import org.hibernate.event.spi.FlushEntityEventListener;
import org.hibernate.event.spi.FlushEvent;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.EntityPrinter;
import org.hibernate.internal.util.collections.IdentityMap;
import org.hibernate.internal.util.collections.LazyIterator;
import org.hibernate.persister.entity.EntityPersister;

@@ -125,7 +123,7 @@ public abstract class AbstractFlushingEventListener implements JpaBootstrapSensi
session.getActionQueue().numberOfCollectionCreations(),
session.getActionQueue().numberOfCollectionUpdates(),
session.getActionQueue().numberOfCollectionRemovals(),
-persistenceContext.getCollectionEntries().size()
+persistenceContext.getCollectionEntriesSize()
);
new EntityPrinter( session.getFactory() ).toString(
persistenceContext.getEntitiesByKey().entrySet()

@@ -193,11 +191,9 @@ public abstract class AbstractFlushingEventListener implements JpaBootstrapSensi
// and reset reached, doupdate, etc.

LOG.debug( "Dirty checking collections" );

-for ( Map.Entry<PersistentCollection,CollectionEntry> entry :
-IdentityMap.concurrentEntries( (Map<PersistentCollection,CollectionEntry>) persistenceContext.getCollectionEntries() ) ) {
-entry.getValue().preFlush( entry.getKey() );
-}
+persistenceContext.forEachCollectionEntry( (pc,ce) -> {
+ce.preFlush( pc );
+}, true );
}

/**

@@ -252,78 +248,70 @@ public abstract class AbstractFlushingEventListener implements JpaBootstrapSensi
private int flushCollections(final EventSource session, final PersistenceContext persistenceContext) throws HibernateException {
LOG.trace( "Processing unreferenced collections" );

-final Map.Entry<PersistentCollection,CollectionEntry>[] entries = IdentityMap.concurrentEntries(
-(Map<PersistentCollection,CollectionEntry>) persistenceContext.getCollectionEntries()
-);
+final int count = persistenceContext.getCollectionEntriesSize();

-final int count = entries.length;
-
-for ( Map.Entry<PersistentCollection,CollectionEntry> me : entries ) {
-CollectionEntry ce = me.getValue();
-if ( !ce.isReached() && !ce.isIgnore() ) {
-Collections.processUnreachableCollection( me.getKey(), session );
-}
-}
+persistenceContext.forEachCollectionEntry(
+(persistentCollection, collectionEntry) -> {
+if ( !collectionEntry.isReached() && !collectionEntry.isIgnore() ) {
+Collections.processUnreachableCollection( persistentCollection, session );
+}
+}, true );

// Schedule updates to collections:

LOG.trace( "Scheduling collection removes/(re)creates/updates" );

-ActionQueue actionQueue = session.getActionQueue();
-for ( Map.Entry<PersistentCollection,CollectionEntry> me :
-IdentityMap.concurrentEntries( (Map<PersistentCollection,CollectionEntry>) persistenceContext.getCollectionEntries() ) ) {
-PersistentCollection coll = me.getKey();
-CollectionEntry ce = me.getValue();
-
-if ( ce.isDorecreate() ) {
-session.getInterceptor().onCollectionRecreate( coll, ce.getCurrentKey() );
-actionQueue.addAction(
-new CollectionRecreateAction(
-coll,
-ce.getCurrentPersister(),
-ce.getCurrentKey(),
-session
-)
-);
-}
-if ( ce.isDoremove() ) {
-session.getInterceptor().onCollectionRemove( coll, ce.getLoadedKey() );
-actionQueue.addAction(
-new CollectionRemoveAction(
-coll,
-ce.getLoadedPersister(),
-ce.getLoadedKey(),
-ce.isSnapshotEmpty(coll),
-session
-)
-);
-}
-if ( ce.isDoupdate() ) {
-session.getInterceptor().onCollectionUpdate( coll, ce.getLoadedKey() );
-actionQueue.addAction(
-new CollectionUpdateAction(
-coll,
-ce.getLoadedPersister(),
-ce.getLoadedKey(),
-ce.isSnapshotEmpty(coll),
-session
-)
-);
-}
-
-// todo : I'm not sure the !wasInitialized part should really be part of this check
-if ( !coll.wasInitialized() && coll.hasQueuedOperations() ) {
-actionQueue.addAction(
-new QueuedOperationCollectionAction(
-coll,
-ce.getLoadedPersister(),
-ce.getLoadedKey(),
-session
-)
-);
-}
-
-}
+final ActionQueue actionQueue = session.getActionQueue();
+final Interceptor interceptor = session.getInterceptor();
+persistenceContext.forEachCollectionEntry(
+(coll, ce) -> {
+if ( ce.isDorecreate() ) {
+interceptor.onCollectionRecreate( coll, ce.getCurrentKey() );
+actionQueue.addAction(
+new CollectionRecreateAction(
+coll,
+ce.getCurrentPersister(),
+ce.getCurrentKey(),
+session
+)
+);
+}
+if ( ce.isDoremove() ) {
+interceptor.onCollectionRemove( coll, ce.getLoadedKey() );
+actionQueue.addAction(
+new CollectionRemoveAction(
+coll,
+ce.getLoadedPersister(),
+ce.getLoadedKey(),
+ce.isSnapshotEmpty( coll ),
+session
+)
+);
+}
+if ( ce.isDoupdate() ) {
+interceptor.onCollectionUpdate( coll, ce.getLoadedKey() );
+actionQueue.addAction(
+new CollectionUpdateAction(
+coll,
+ce.getLoadedPersister(),
+ce.getLoadedKey(),
+ce.isSnapshotEmpty( coll ),
+session
+)
+);
+}
+// todo : I'm not sure the !wasInitialized part should really be part of this check
+if ( !coll.wasInitialized() && coll.hasQueuedOperations() ) {
+actionQueue.addAction(
+new QueuedOperationCollectionAction(
+coll,
+ce.getLoadedPersister(),
+ce.getLoadedKey(),
+session
+)
+);
+}
+}, true );

actionQueue.sortCollectionActions();

@@ -387,27 +375,25 @@ public abstract class AbstractFlushingEventListener implements JpaBootstrapSensi
// the batch fetching queues should also be cleared - especially the collection batch fetching one
persistenceContext.getBatchFetchQueue().clear();

-for ( Map.Entry<PersistentCollection, CollectionEntry> me : IdentityMap.concurrentEntries( persistenceContext.getCollectionEntries() ) ) {
-CollectionEntry collectionEntry = me.getValue();
-PersistentCollection persistentCollection = me.getKey();
-collectionEntry.postFlush(persistentCollection);
-if ( collectionEntry.getLoadedPersister() == null ) {
-//if the collection is dereferenced, unset its session reference and remove from the session cache
-//iter.remove(); //does not work, since the entrySet is not backed by the set
-persistentCollection.unsetSession( session );
-persistenceContext.getCollectionEntries()
-.remove(persistentCollection);
-}
-else {
-//otherwise recreate the mapping between the collection and its key
-CollectionKey collectionKey = new CollectionKey(
-collectionEntry.getLoadedPersister(),
-collectionEntry.getLoadedKey()
-);
-persistenceContext.getCollectionsByKey().put(collectionKey, persistentCollection);
-}
-}
-
+persistenceContext.forEachCollectionEntry(
+(persistentCollection, collectionEntry) -> {
+collectionEntry.postFlush( persistentCollection );
+if ( collectionEntry.getLoadedPersister() == null ) {
+//if the collection is dereferenced, unset its session reference and remove from the session cache
+//iter.remove(); //does not work, since the entrySet is not backed by the set
+persistentCollection.unsetSession( session );
+persistenceContext.removeCollectionEntry( persistentCollection );
+}
+else {
+//otherwise recreate the mapping between the collection and its key
+CollectionKey collectionKey = new CollectionKey(
+collectionEntry.getLoadedPersister(),
+collectionEntry.getLoadedKey()
+);
+persistenceContext.getCollectionsByKey().put( collectionKey, persistentCollection );
+}
+}, true
+);
}

protected void postPostFlush(SessionImplementor session) {
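Reviewer note: every removed loop above first materialized a defensive Map.Entry[] copy via IdentityMap.concurrentEntries(); forEachCollectionEntry moves that decision into the PersistenceContext, which owns the map. A sketch of the calling pattern, assuming the boolean means "iterate a defensive copy" as the new javadoc describes:

// Old shape: always copies all entries into an array before iterating.
// for ( Map.Entry<PersistentCollection, CollectionEntry> me :
//         IdentityMap.concurrentEntries( collectionEntries ) ) {
//     me.getValue().preFlush( me.getKey() );
// }

// New shape: pass true when the action may mutate the map (a copy is made),
// false to iterate in place with no allocation.
persistenceContext.forEachCollectionEntry( (collection, entry) -> entry.preFlush( collection ), true );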
@@ -50,6 +50,7 @@ public class DefaultAutoFlushEventListener extends AbstractFlushingEventListene
flushEverythingToExecutions( event );
if ( flushIsReallyNeeded( event, source ) ) {
LOG.trace( "Need to execute flush" );
+event.setFlushRequired( true );

// note: performExecutions() clears all collectionXxxxtion
// collections (the collection actions) in the session

@@ -65,10 +66,9 @@ public class DefaultAutoFlushEventListener extends AbstractFlushingEventListener
}
else {
LOG.trace( "Don't need to execute flush" );
+event.setFlushRequired( false );
actionQueue.clearFromFlushNeededCheck( oldSize );
}

-event.setFlushRequired( flushIsReallyNeeded( event, source ) );
}
}
finally {

@@ -89,6 +89,6 @@ public class DefaultAutoFlushEventListener extends AbstractFlushingEventListene
return !source.getHibernateFlushMode().lessThan( FlushMode.AUTO )
&& source.getDontFlushFromFind() == 0
&& ( persistenceContext.getNumberOfManagedEntities() > 0 ||
-persistenceContext.getCollectionEntries().size() > 0 );
+persistenceContext.getCollectionEntriesSize() > 0 );
}
}
@@ -287,7 +287,7 @@ public class DefaultDeleteEventListener implements DeleteEventListener, Callback

new ForeignKeys.Nullifier( entity, true, false, session, persister ).nullifyTransientReferences( entityEntry.getDeletedState() );
new Nullability( session ).checkNullability( entityEntry.getDeletedState(), persister, Nullability.NullabilityCheckType.DELETE );
-persistenceContext.getNullifiableEntityKeys().add( key );
+persistenceContext.registerNullifiableEntityKey( key );

if ( isOrphanRemovalBeforeUpdates ) {
// TODO: The removeOrphan concept is a temporary "hack" for HHH-6484. This should be removed once action/task
@@ -7,6 +7,7 @@
package org.hibernate.event.internal;

import org.hibernate.HibernateException;
+import org.hibernate.engine.spi.ActionQueue;
import org.hibernate.event.spi.DirtyCheckEvent;
import org.hibernate.event.spi.DirtyCheckEventListener;
import org.hibernate.internal.CoreLogging;

@@ -29,11 +30,12 @@ public class DefaultDirtyCheckEventListener extends AbstractFlushingEventListene
* @throws HibernateException
*/
public void onDirtyCheck(DirtyCheckEvent event) throws HibernateException {
-int oldSize = event.getSession().getActionQueue().numberOfCollectionRemovals();
+final ActionQueue actionQueue = event.getSession().getActionQueue();
+int oldSize = actionQueue.numberOfCollectionRemovals();

try {
flushEverythingToExecutions(event);
-boolean wasNeeded = event.getSession().getActionQueue().hasAnyQueuedActions();
+boolean wasNeeded = actionQueue.hasAnyQueuedActions();
if ( wasNeeded ) {
LOG.debug( "Session dirty" );
}

@@ -43,7 +45,7 @@ public class DefaultDirtyCheckEventListener extends AbstractFlushingEventListene
event.setDirty( wasNeeded );
}
finally {
-event.getSession().getActionQueue().clearFromFlushNeededCheck( oldSize );
+actionQueue.clearFromFlushNeededCheck( oldSize );
}
}
}
@@ -17,11 +17,14 @@ import org.hibernate.StaleObjectStateException;
import org.hibernate.action.internal.DelayedPostInsertIdentifier;
import org.hibernate.action.internal.EntityUpdateAction;
import org.hibernate.bytecode.enhance.spi.LazyPropertyInitializer;
+import org.hibernate.bytecode.enhance.spi.interceptor.EnhancementAsProxyLazinessInterceptor;
import org.hibernate.engine.internal.Nullability;
import org.hibernate.engine.internal.Versioning;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.PersistenceContext;
+import org.hibernate.engine.spi.PersistentAttributeInterceptable;
+import org.hibernate.engine.spi.PersistentAttributeInterceptor;
import org.hibernate.engine.spi.SelfDirtinessTracker;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.Status;

@@ -84,14 +87,24 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener

private void checkNaturalId(
EntityPersister persister,
+Object entity,
EntityEntry entry,
Object[] current,
Object[] loaded,
SessionImplementor session) {
+if ( entity instanceof PersistentAttributeInterceptable ) {
+final PersistentAttributeInterceptor interceptor = ( (PersistentAttributeInterceptable) entity ).$$_hibernate_getInterceptor();
+if ( interceptor instanceof EnhancementAsProxyLazinessInterceptor ) {
+// EARLY EXIT!!!
+// nothing to check - the entity is an un-initialized enhancement-as-proxy reference
+return;
+}
+}
+
if ( persister.hasNaturalIdentifier() && entry.getStatus() != Status.READ_ONLY ) {
if ( !persister.getEntityMetamodel().hasImmutableNaturalId() ) {
-// SHORT-CUT: if the natural id is mutable (!immutable), no need to do the below checks
+// EARLY EXIT!!!
+// the natural id is mutable (!immutable), no need to do the below checks
return;
}

@@ -115,7 +128,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
if ( !propertyType.isEqual( current[naturalIdentifierPropertyIndex], snapshot[i] ) ) {
throw new HibernateException(
String.format(
-"An immutable natural identifier of entity %s was altered from %s to %s",
+"An immutable natural identifier of entity %s was altered from `%s` to `%s`",
persister.getEntityName(),
propertyTypes[naturalIdentifierPropertyIndex].toLoggableString(
snapshot[i],

@@ -191,7 +204,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
// grab its current state
values = persister.getPropertyValues( entity );

-checkNaturalId( persister, entry, values, loadedState, session );
+checkNaturalId( persister, entity, entry, values, loadedState, session );
}
return values;
}
@@ -31,7 +31,7 @@ public class DefaultFlushEventListener extends AbstractFlushingEventListener imp
final PersistenceContext persistenceContext = source.getPersistenceContextInternal();

if ( persistenceContext.getNumberOfManagedEntities() > 0 ||
-persistenceContext.getCollectionEntries().size() > 0 ) {
+persistenceContext.getCollectionEntriesSize() > 0 ) {

try {
source.getEventListenerManager().flushStart();
@@ -300,7 +300,7 @@ public class DefaultLoadEventListener implements LoadEventListener {
// entities with subclasses that define a ProxyFactory can create
// a HibernateProxy so long as NO_PROXY was not specified.
if ( event.getShouldUnwrapProxy() != null && event.getShouldUnwrapProxy() ) {
-LOG.debugf( "Ignoring NO_PROXY for to-one association with subclasses to honor laziness" );
+LOG.debug( "Ignoring NO_PROXY for to-one association with subclasses to honor laziness" );
}
return createProxy( event, persister, keyToLoad, persistenceContext );
}
@@ -359,7 +359,7 @@ public class DefaultMergeEventListener extends AbstractSaveEventListener impleme
Object managed,
EntityPersister persister,
EventSource source) {
if ( incoming instanceof HibernateProxy ) {
if ( managed instanceof HibernateProxy ) {
return source.getPersistenceContextInternal().unproxy( managed );
}
@@ -20,15 +20,18 @@ import org.hibernate.cache.spi.access.EntityDataAccess;
import org.hibernate.cache.spi.access.SoftLock;
import org.hibernate.engine.internal.Cascade;
import org.hibernate.engine.internal.CascadePoint;
+import org.hibernate.engine.spi.ActionQueue;
import org.hibernate.engine.spi.CascadingActions;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.PersistenceContext;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.RefreshEvent;
import org.hibernate.event.spi.RefreshEventListener;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
+import org.hibernate.metamodel.spi.MetamodelImplementor;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;

@@ -239,20 +242,23 @@ public class DefaultRefreshEventListener implements RefreshEventListener {

private void evictCachedCollections(Type[] types, Serializable id, EventSource source)
throws HibernateException {
+final ActionQueue actionQueue = source.getActionQueue();
+final SessionFactoryImplementor factory = source.getFactory();
+final MetamodelImplementor metamodel = factory.getMetamodel();
for ( Type type : types ) {
if ( type.isCollectionType() ) {
-CollectionPersister collectionPersister = source.getFactory().getMetamodel().collectionPersister( ( (CollectionType) type ).getRole() );
+CollectionPersister collectionPersister = metamodel.collectionPersister( ( (CollectionType) type ).getRole() );
if ( collectionPersister.hasCache() ) {
final CollectionDataAccess cache = collectionPersister.getCacheAccessStrategy();
final Object ck = cache.generateCacheKey(
id,
collectionPersister,
-source.getFactory(),
+factory,
source.getTenantIdentifier()
);
final SoftLock lock = cache.lockItem( source, ck, null );
cache.remove( source, ck );
-source.getActionQueue().registerProcess( (success, session) -> cache.unlockItem( session, ck, lock ) );
+actionQueue.registerProcess( (success, session) -> cache.unlockItem( session, ck, lock ) );
}
}
else if ( type.isComponentType() ) {
@@ -35,15 +35,15 @@ public class EntityCopyObserverFactoryInitiator implements StandardServiceInitia
public EntityCopyObserverFactory initiateService(final Map configurationValues, final ServiceRegistryImplementor registry) {
final Object value = getConfigurationValue( configurationValues );
if ( value.equals( EntityCopyNotAllowedObserver.SHORT_NAME ) || value.equals( EntityCopyNotAllowedObserver.class.getName() ) ) {
-LOG.debugf( "Configured EntityCopyObserver strategy: " + EntityCopyNotAllowedObserver.SHORT_NAME );
+LOG.debugf( "Configured EntityCopyObserver strategy: %s", EntityCopyNotAllowedObserver.SHORT_NAME );
return EntityCopyNotAllowedObserver.FACTORY_OF_SELF;
}
else if ( value.equals( EntityCopyAllowedObserver.SHORT_NAME ) || value.equals( EntityCopyAllowedObserver.class.getName() ) ) {
-LOG.debugf( "Configured EntityCopyObserver strategy: " + EntityCopyAllowedObserver.SHORT_NAME );
+LOG.debugf( "Configured EntityCopyObserver strategy: %s", EntityCopyAllowedObserver.SHORT_NAME );
return EntityCopyAllowedObserver.FACTORY_OF_SELF;
}
else if ( value.equals( EntityCopyAllowedLoggedObserver.SHORT_NAME ) || value.equals( EntityCopyAllowedLoggedObserver.class.getName() ) ) {
-LOG.debugf( "Configured EntityCopyObserver strategy: " + EntityCopyAllowedLoggedObserver.SHORT_NAME );
+LOG.debugf( "Configured EntityCopyObserver strategy: %s", EntityCopyAllowedLoggedObserver.SHORT_NAME );
return EntityCopyAllowedLoggedObserver.FACTORY_OF_SELF;
}
else {

@@ -52,7 +52,7 @@ public class EntityCopyObserverFactoryInitiator implements StandardServiceInitia
//and that they are indeed of the right type.
EntityCopyObserver exampleInstance = registry.getService( StrategySelector.class ).resolveStrategy( EntityCopyObserver.class, value );
Class observerType = exampleInstance.getClass();
-LOG.debugf( "Configured EntityCopyObserver is a custom implementation of type " + observerType.getName() );
+LOG.debugf( "Configured EntityCopyObserver is a custom implementation of type %s", observerType.getName() );
return new EntityObserversFactoryFromClass( observerType );
}
}
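Reviewer note: handing debugf a pre-concatenated string defeats the point of the format-argument overloads — the concatenation runs even when DEBUG is disabled, and a stray % in the argument could be misread as a format specifier. A minimal illustration (strategyName is a hypothetical local):

// Eager: the string is built on every call, DEBUG enabled or not.
LOG.debugf( "Configured EntityCopyObserver strategy: " + strategyName );

// Lazy: jboss-logging only renders the message when DEBUG is enabled.
LOG.debugf( "Configured EntityCopyObserver strategy: %s", strategyName );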
@@ -67,7 +67,7 @@ public class EvictVisitor extends AbstractVisitor {

private void evictCollection(PersistentCollection collection) {
final PersistenceContext persistenceContext = getSession().getPersistenceContextInternal();
-CollectionEntry ce = (CollectionEntry) persistenceContext.getCollectionEntries().remove(collection);
+CollectionEntry ce = persistenceContext.removeCollectionEntry( collection );
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Evicting collection: %s",
@@ -6,14 +6,19 @@
*/
package org.hibernate.event.service.internal;

+import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
+import java.util.function.BiConsumer;
+import java.util.function.Supplier;

import org.hibernate.event.service.spi.DuplicationStrategy;
import org.hibernate.event.service.spi.EventActionWithParameter;
import org.hibernate.event.service.spi.EventListenerGroup;
import org.hibernate.event.service.spi.EventListenerRegistrationException;
import org.hibernate.event.service.spi.JpaBootstrapSensitive;

@@ -22,13 +27,22 @@ import org.hibernate.jpa.event.spi.CallbackRegistryConsumer;

/**
* @author Steve Ebersole
+* @author Sanne Grinovero
*/
class EventListenerGroupImpl<T> implements EventListenerGroup<T> {
private EventType<T> eventType;
private final EventListenerRegistryImpl listenerRegistry;

private final Set<DuplicationStrategy> duplicationStrategies = new LinkedHashSet<>();
-private List<T> listeners;

+// Performance: make sure iteration on this type is efficient; in particular we do not want to allocate iterators,
+// not having to capture state in lambdas.
+// So we keep the listeners in both a List (for convenience) and in an array (for iteration). Make sure
+// their content stays in synch!
+private T[] listeners = null;
+
+//Update both fields when making changes!
+private List<T> listenersAsList;

public EventListenerGroupImpl(EventType<T> eventType, EventListenerRegistryImpl listenerRegistry) {
this.eventType = eventType;

@@ -62,7 +76,8 @@ class EventListenerGroupImpl<T> implements EventListenerGroup<T> {

@Override
public int count() {
-return listeners == null ? 0 : listeners.size();
+final T[] ls = listeners;
+return ls == null ? 0 : ls.length;
}

@Override

@@ -70,8 +85,38 @@ class EventListenerGroupImpl<T> implements EventListenerGroup<T> {
if ( duplicationStrategies != null ) {
duplicationStrategies.clear();
}
if ( listeners != null ) {
-listeners.clear();
+listeners = null;
+listenersAsList = null;
}

+@Override
+public final <U> void fireLazyEventOnEachListener(final Supplier<U> eventSupplier, final BiConsumer<T,U> actionOnEvent) {
+final T[] ls = listeners;
+if ( ls != null && ls.length != 0 ) {
+final U event = eventSupplier.get();
+for ( T listener : ls ) {
+actionOnEvent.accept( listener, event );
+}
+}
+}
+
+@Override
+public final <U> void fireEventOnEachListener(final U event, final BiConsumer<T,U> actionOnEvent) {
+final T[] ls = listeners;
+if ( ls != null ) {
+for ( T listener : ls ) {
+actionOnEvent.accept( listener, event );
+}
+}
+}
+
+@Override
+public <U,X> void fireEventOnEachListener(final U event, final X parameter, final EventActionWithParameter<T, U, X> actionOnEvent) {
+final T[] ls = listeners;
+if ( ls != null ) {
+for ( T listener : ls ) {
+actionOnEvent.applyEventToListener( listener, event, parameter );
+}
+}
+}

@@ -82,22 +127,44 @@ class EventListenerGroupImpl<T> implements EventListenerGroup<T> {

/**
* Implementation note: should be final for performance reasons.
+* @deprecated this is not the most efficient way for iterating the event listeners.
+* See {@link #fireEventOnEachListener(Object, BiConsumer)} and co. for better alternatives.
*/
@Override
+@Deprecated
public final Iterable<T> listeners() {
-return listeners == null ? Collections.EMPTY_LIST : listeners;
+final List<T> ls = listenersAsList;
+return ls == null ? Collections.EMPTY_LIST : ls;
}

@Override
@SafeVarargs
public final void appendListeners(T... listeners) {
+internalAppendListeners( listeners );
+checkForArrayRefresh();
+}
+
+private void checkForArrayRefresh() {
+final List<T> list = listenersAsList;
+if ( this.listeners == null ) {
+T[] a = (T[]) Array.newInstance( eventType.baseListenerInterface(), list.size() );
+listeners = list.<T>toArray( a );
+}
+}
+
+private void internalAppendListeners(T[] listeners) {
for ( T listener : listeners ) {
-appendListener( listener );
+internalAppendListener( listener );
}
}

@Override
public void appendListener(T listener) {
+internalAppendListener( listener );
+checkForArrayRefresh();
+}
+
+private void internalAppendListener(T listener) {
if ( listenerShouldGetAdded( listener ) ) {
internalAppend( listener );
}

@@ -106,28 +173,39 @@ class EventListenerGroupImpl<T> implements EventListenerGroup<T> {
@Override
@SafeVarargs
public final void prependListeners(T... listeners) {
+internalPrependListeners( listeners );
+checkForArrayRefresh();
+}
+
+private void internalPrependListeners(T[] listeners) {
for ( T listener : listeners ) {
-prependListener( listener );
+internalPreprendListener( listener );
}
}

@Override
public void prependListener(T listener) {
+internalPreprendListener( listener );
+checkForArrayRefresh();
+}
+
+private void internalPreprendListener(T listener) {
if ( listenerShouldGetAdded( listener ) ) {
internalPrepend( listener );
}
}

private boolean listenerShouldGetAdded(T listener) {
-if ( listeners == null ) {
-listeners = new ArrayList<>();
+final List<T> ts = listenersAsList;
+if ( ts == null ) {
+listenersAsList = new ArrayList<>();
return true;
// no need to do de-dup checks
}

boolean doAdd = true;
strategy_loop: for ( DuplicationStrategy strategy : duplicationStrategies ) {
-final ListIterator<T> itr = listeners.listIterator();
+final ListIterator<T> itr = ts.listIterator();
while ( itr.hasNext() ) {
final T existingListener = itr.next();
if ( strategy.areMatch( listener, existingListener ) ) {

@@ -157,7 +235,8 @@ class EventListenerGroupImpl<T> implements EventListenerGroup<T> {
private void internalPrepend(T listener) {
checkAgainstBaseInterface( listener );
performInjections( listener );
-listeners.add( 0, listener );
+listenersAsList.add( 0, listener );
+listeners = null; //Marks it for refreshing
}

private void performInjections(T listener) {

@@ -183,6 +262,7 @@ class EventListenerGroupImpl<T> implements EventListenerGroup<T> {
private void internalAppend(T listener) {
checkAgainstBaseInterface( listener );
performInjections( listener );
-listeners.add( listener );
+listenersAsList.add( listener );
+listeners = null; //Marks it for refreshing
}
}
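Reviewer note: the class now keeps the listeners twice — a List for registration-time convenience and a flat array for allocation-free iteration — and every mutator nulls the array so checkForArrayRefresh() can rebuild it. The same copy-on-write-array idea in a self-contained sketch (this adds synchronization for standalone safety; the patch itself relies on listener registration happening during single-threaded bootstrap):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

final class ListenerGroupSketch<T> {
    private final List<T> asList = new ArrayList<>();
    private volatile Object[] snapshot = new Object[0];

    synchronized void add(T listener) {
        asList.add( listener );
        snapshot = asList.toArray(); // refresh the iteration array
    }

    @SuppressWarnings("unchecked")
    void fire(Consumer<T> action) {
        final Object[] s = snapshot; // plain array loop: no Iterator allocation
        for ( Object o : s ) {
            action.accept( (T) o );
        }
    }
}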
@@ -0,0 +1,17 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.event.service.spi;
+
+import org.hibernate.Incubating;
+
+@Incubating
+@FunctionalInterface
+public interface EventActionWithParameter<T, U, X> {
+
+void applyEventToListener(T eventListener, U action, X param);
+
+}
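Reviewer note: the three-argument shape exists so a caller can pass its extra state as an explicit argument instead of capturing it, which lets the JVM reuse a single non-capturing lambda instance per call site. A hypothetical caller (onSomething stands in for a real listener method):

// listener, event and the extra parameter all arrive as arguments,
// so the lambda captures nothing and allocates nothing per event.
listenerGroup.fireEventOnEachListener(
        event,
        session,
        (listener, evt, sess) -> listener.onSomething( evt, sess )
);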
@@ -7,7 +7,11 @@
package org.hibernate.event.service.spi;

import java.io.Serializable;
import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.function.Supplier;

+import org.hibernate.Incubating;
import org.hibernate.event.spi.EventType;

/**

@@ -33,6 +37,12 @@ public interface EventListenerGroup<T> extends Serializable {

public int count();

+/**
+* @deprecated this is not the most efficient way for iterating the event listeners.
+* See {@link #fireEventOnEachListener(Object, BiConsumer)} and its overloaded variants for better alternatives.
+* @return
+*/
+@Deprecated
public Iterable<T> listeners();

/**

@@ -54,4 +64,31 @@ public interface EventListenerGroup<T> extends Serializable {

public void clear();

+/**
+* Fires an event on each registered event listener of this group.
+*
+* Implementation note (performance):
+* the first argument is a supplier so that events can avoid allocation when no listener is registered.
+* the second argument is specifically designed to avoid needing a capturing lambda.
+*
+* @param eventSupplier
+* @param actionOnEvent
+* @param <U> the kind of event
+*/
+@Incubating
+<U> void fireLazyEventOnEachListener(final Supplier<U> eventSupplier, final BiConsumer<T,U> actionOnEvent);
+
+/**
+* Similar as {@link #fireLazyEventOnEachListener(Supplier, BiConsumer)} except it doesn't use a {{@link Supplier}}:
+* useful when there is no need to lazily initialize the event.
+* @param event
+* @param actionOnEvent
+* @param <U> the kind of event
+*/
+@Incubating
+<U> void fireEventOnEachListener(final U event, final BiConsumer<T,U> actionOnEvent);
+
+@Incubating
+<U,X> void fireEventOnEachListener(final U event, X param, final EventActionWithParameter<T,U,X> actionOnEvent);
+
}
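Reviewer note: the Supplier-based variant is meant for events that are costly to construct — with no listeners registered the supplier is never invoked, so nothing is allocated. A hypothetical caller, using the auto-flush event purely as an illustration:

// The event object is only built if at least one listener is registered.
listenerGroup.fireLazyEventOnEachListener(
        () -> new AutoFlushEvent( querySpaces, source ),
        (listener, event) -> listener.onAutoFlush( event )
);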
@@ -48,10 +48,7 @@ public class ParameterTranslationsImpl implements ParameterTranslations {
}

final PositionalParameterSpecification ordinalSpecification = (PositionalParameterSpecification) specification;
-final PositionalParameterInformationImpl info = ordinalParameters.computeIfAbsent(
-ordinalSpecification.getLabel(),
-k -> new PositionalParameterInformationImpl( k, ordinalSpecification.getExpectedType() )
-);
+final PositionalParameterInformationImpl info = getPositionalParameterInfo( ordinalParameters, ordinalSpecification );
info.addSourceLocation( i++ );
}
else if ( NamedParameterSpecification.class.isInstance( specification ) ) {

@@ -60,10 +57,7 @@ public class ParameterTranslationsImpl implements ParameterTranslations {
}

final NamedParameterSpecification namedSpecification = (NamedParameterSpecification) specification;
-final NamedParameterInformationImpl info = namedParameters.computeIfAbsent(
-namedSpecification.getName(),
-k -> new NamedParameterInformationImpl( k, namedSpecification.getExpectedType() )
-);
+final NamedParameterInformationImpl info = getNamedParameterInfo( namedParameters, namedSpecification );

/*
If a previous reference to the NamedParameter already exists with expected type null and the new

@@ -96,6 +90,30 @@ public class ParameterTranslationsImpl implements ParameterTranslations {
}
}

+private NamedParameterInformationImpl getNamedParameterInfo(
+Map<String, NamedParameterInformationImpl> namedParameters,
+NamedParameterSpecification namedSpecification) {
+final String name = namedSpecification.getName();
+NamedParameterInformationImpl namedParameterInformation = namedParameters.get( name );
+if ( namedParameterInformation == null ) {
+namedParameterInformation = new NamedParameterInformationImpl( name, namedSpecification.getExpectedType() );
+namedParameters.put( name, namedParameterInformation );
+}
+return namedParameterInformation;
+}
+
+private static PositionalParameterInformationImpl getPositionalParameterInfo(
+Map<Integer, PositionalParameterInformationImpl> ordinalParameters,
+PositionalParameterSpecification ordinalSpecification) {
+final Integer label = Integer.valueOf( ordinalSpecification.getLabel() );
+PositionalParameterInformationImpl positionalParameterInformation = ordinalParameters.get( label );
+if ( positionalParameterInformation == null ) {
+positionalParameterInformation = new PositionalParameterInformationImpl( label, ordinalSpecification.getExpectedType() );
+ordinalParameters.put( label, positionalParameterInformation );
+}
+return positionalParameterInformation;
+}
+
@Override
@SuppressWarnings("unchecked")
public Map getNamedParameterInformationMap() {
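Reviewer note: the explicit get/put helpers are behaviorally equivalent to computeIfAbsent here, but avoid allocating a capturing lambda on every call — computeIfAbsent builds the lambda object even when the key is already present. Side-by-side sketch (Info is a hypothetical stand-in for the parameter-information types):

// Allocates a capturing lambda per call, hit or miss:
Info info = map.computeIfAbsent( key, k -> new Info( k, expectedType ) );

// Allocation-free on the hot path; the constructor only runs on a miss:
Info info = map.get( key );
if ( info == null ) {
    info = new Info( key, expectedType );
    map.put( key, info );
}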
@@ -17,6 +17,7 @@ import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.internal.ast.SqlGenerator;
import org.hibernate.hql.internal.ast.tree.DeleteStatement;
+import org.hibernate.metamodel.spi.MetamodelImplementor;
import org.hibernate.param.ParameterSpecification;
import org.hibernate.persister.collection.AbstractCollectionPersister;
import org.hibernate.persister.entity.Queryable;

@@ -63,15 +64,18 @@ public class DeleteExecutor extends BasicExecutor {
parameterSpecifications = new ArrayList<>();
idSubselectWhere = "";
}

+final boolean commentsEnabled = factory.getSessionFactoryOptions().isCommentsEnabled();
+final MetamodelImplementor metamodel = factory.getMetamodel();
+final boolean notSupportingTuplesInSubqueries = !dialect.supportsTuplesInSubqueries();
// If many-to-many, delete the FK row in the collection table.
for ( Type type : persister.getPropertyTypes() ) {
if ( type.isCollectionType() ) {
final CollectionType cType = (CollectionType) type;
-final AbstractCollectionPersister cPersister = (AbstractCollectionPersister) factory.getMetamodel().collectionPersister( cType.getRole() );
+final AbstractCollectionPersister cPersister = (AbstractCollectionPersister) metamodel.collectionPersister( cType.getRole() );
if ( cPersister.isManyToMany() ) {
if ( persister.getIdentifierColumnNames().length > 1
-&& !dialect.supportsTuplesInSubqueries() ) {
+&& notSupportingTuplesInSubqueries ) {
LOG.warn(
"This dialect is unable to cascade the delete into the many-to-many join table" +
" when the entity has multiple primary keys. Either properly setup cascading on" +

@@ -85,7 +89,7 @@ public class DeleteExecutor extends BasicExecutor {
final String where = "(" + String.join( ", ", cPersister.getKeyColumnNames() )
+ ") in " + idSubselect;
final Delete delete = new Delete().setTableName( cPersister.getTableName() ).setWhere( where );
-if ( factory.getSessionFactoryOptions().isCommentsEnabled() ) {
+if ( commentsEnabled ) {
delete.setComment( "delete FKs in join table" );
}
deletes.add( delete.toStatementString() );
@@ -11,6 +11,8 @@ import java.util.Map;
import org.hibernate.hql.internal.antlr.HqlSqlTokenTypes;
import org.hibernate.hql.internal.ast.util.ColumnHelper;
import org.hibernate.persister.collection.QueryableCollection;
+import org.hibernate.persister.entity.AbstractEntityPersister;
+import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.type.CollectionType;
import org.hibernate.type.Type;

@@ -123,4 +125,19 @@ public abstract class AbstractMapComponentNode extends FromReferenceNode impleme

return MapKeyEntityFromElement.buildKeyJoin( getFromElement() );
}

+@Override
+public String[] getReferencedTables() {
+String[] referencedTables = null;
+FromElement fromElement = getFromElement();
+if ( fromElement != null ) {
+EntityPersister entityPersister = fromElement.getEntityPersister();
+if ( entityPersister != null && entityPersister instanceof AbstractEntityPersister ) {
+AbstractEntityPersister abstractEntityPersister = (AbstractEntityPersister) entityPersister;
+referencedTables = abstractEntityPersister.getTableNames();
+}
+}
+return referencedTables;
+}
+
}
@@ -76,10 +76,18 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
* The identifier that is the name of the property.
*/
private String propertyName;

+/**
+* The identifier that is the name of the property. In comparison with {@link #propertyName}
+* it is always identical with identifier in the query, it is not changed during processing.
+*/
+private String originalPropertyName;
+
/**
* The full path, to the root alias of this dot node.
*/
private String path;

/**
* The unresolved property path relative to this dot node.
*/

@@ -160,6 +168,7 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
// Set the attributes of the property reference expression.
String propName = property.getText();
propertyName = propName;
+originalPropertyName = propName;
// If the uresolved property path isn't set yet, just use the property name.
if ( propertyPath == null ) {
propertyPath = propName;

@@ -692,6 +701,25 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
return super.getDataType();
}

+@Override
+public String[] getReferencedTables() {
+String[] referencedTables = null;
+AST firstChild = getFirstChild();
+if ( firstChild != null ) {
+if ( firstChild instanceof FromReferenceNode ) {
+FromReferenceNode fromReferenceNode = (FromReferenceNode) firstChild;
+FromElement fromElement = fromReferenceNode.getFromElement();
+if ( fromElement != null ) {
+String table = fromElement.getPropertyTableName( getOriginalPropertyName() );
+if ( table != null ) {
+referencedTables = new String[] { table };
+}
+}
+}
+}
+return referencedTables;
+}
+
public void setPropertyPath(String propertyPath) {
this.propertyPath = propertyPath;
}

@@ -700,6 +728,14 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
return propertyPath;
}

+public String getPropertyName() {
+return propertyName;
+}
+
+public String getOriginalPropertyName() {
+return originalPropertyName;
+}
+
public FromReferenceNode getLhs() {
FromReferenceNode lhs = ( (FromReferenceNode) getFirstChild() );
if ( lhs == null ) {
@@ -343,13 +343,10 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
final String[] propertyNames = getIdentifierPropertyNames();
List<String> columns = new ArrayList<>();
final boolean inSelect = getWalker().getStatementType() == HqlSqlTokenTypes.SELECT;
-for ( int i = 0; i < propertyNames.length; i++ ) {
-String[] propertyNameColumns = toColumns(
-table, propertyNames[i],
-inSelect
-);
-for ( int j = 0; j < propertyNameColumns.length; j++ ) {
-columns.add( propertyNameColumns[j] );
+for ( String propertyName : propertyNames ) {
+String[] propertyNameColumns = toColumns( table, propertyName, inSelect );
+for ( String propertyNameColumn : propertyNameColumns ) {
+columns.add( propertyNameColumn );
}
}
return columns.toArray( new String[columns.size()] );

@@ -517,6 +514,10 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
return elementType.getPropertyType( propertyName, propertyPath );
}

+public String getPropertyTableName(String propertyName) {
+return elementType.getPropertyTableName( propertyName );
+}
+
public String[] toColumns(String tableAlias, String path, boolean inSelect) {
return elementType.toColumns( tableAlias, path, inSelect );
}
@@ -27,6 +27,7 @@ import org.hibernate.param.ParameterSpecification;
import org.hibernate.persister.collection.CollectionPropertyMapping;
import org.hibernate.persister.collection.CollectionPropertyNames;
import org.hibernate.persister.collection.QueryableCollection;
+import org.hibernate.persister.entity.AbstractEntityPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Joinable;
import org.hibernate.persister.entity.PropertyMapping;

@@ -372,6 +373,15 @@ class FromElementType {
return queryableCollection;
}

+public String getPropertyTableName(String propertyName) {
+checkInitialized();
+if ( this.persister != null ) {
+AbstractEntityPersister aep = (AbstractEntityPersister) this.persister;
+return aep.getPropertyTableName( propertyName );
+}
+return null;
+}
+
/**
* Returns the type of a property, given it's name (the last part) and the full path.
*
@@ -137,4 +137,14 @@ public abstract class FromReferenceNode extends AbstractSelectExpression
|| getWalker().getStatementType() == HqlSqlTokenTypes.UPDATE;
}

+/**
+* Returns table names which are referenced by this node. If the tables
+* can not be determined it returns null.
+*
+* @return table names or null.
+*/
+public String[] getReferencedTables() {
+return null;
+}
+
}
@@ -161,8 +161,8 @@ public class IntoClause extends HqlSqlWalkerNode implements DisplayableNode {
if ( componentIds == null ) {
String[] propertyNames = ( (CompositeType) persister.getIdentifierType() ).getPropertyNames();
componentIds = new HashSet();
-for ( int i = 0; i < propertyNames.length; i++ ) {
-componentIds.add( propertyNames[i] );
+for ( String propertyName : propertyNames ) {
+componentIds.add( propertyName );
}
}
if ( componentIds.contains( name ) ) {

@@ -194,8 +194,8 @@ public class IntoClause extends HqlSqlWalkerNode implements DisplayableNode {
}

private void renderColumns(String[] columnNames) {
-for ( int i = 0; i < columnNames.length; i++ ) {
-columnSpec += columnNames[i] + ", ";
+for ( String columnName : columnNames ) {
+columnSpec += columnName + ", ";
}
}
@@ -10,6 +10,8 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
import java.util.Map;

import org.hibernate.hql.internal.ast.tree.DisplayableNode;

@@ -25,7 +27,7 @@ import antlr.collections.AST;
* @author Joshua Davis
* @author Steve Ebersole
*/
-public final class ASTPrinter {
+public class ASTPrinter {

// This is a map: array index is the ANTLR Token ID, array value is the name of that token.
// There might be gaps in the array (null values) but it's generally quite compact.

@@ -103,15 +105,7 @@ public final class ASTPrinter {
return;
}

-for ( AST parent : parents ) {
-if ( parent.getNextSibling() == null ) {
-
-pw.print( " " );
-}
-else {
-pw.print( " | " );
-}
-}
+indentLine( parents, pw );

if ( ast.getNextSibling() == null ) {
pw.print( " \\-" );

@@ -121,6 +115,7 @@ public final class ASTPrinter {
}

showNode( pw, ast );
+showNodeProperties( parents, pw, ast );

ArrayList<AST> newParents = new ArrayList<AST>( parents );
newParents.add( ast );

@@ -130,6 +125,17 @@ public final class ASTPrinter {
newParents.clear();
}

+private void indentLine(List<AST> parents, PrintWriter pw) {
+for ( AST parent : parents ) {
+if ( parent.getNextSibling() == null ) {
+pw.print( " " );
+}
+else {
+pw.print( " | " );
+}
+}
+}
+
private void showNode(PrintWriter pw, AST ast) {
String s = nodeToString( ast );
pw.println( s );

@@ -158,6 +164,24 @@ public final class ASTPrinter {
return buf.toString();
}

+private void showNodeProperties(ArrayList<AST> parents, PrintWriter pw, AST ast) {
+Map<String, Object> nodeProperties = createNodeProperties( ast );
+ArrayList<AST> parentsAndNode = new ArrayList<>( parents );
+parentsAndNode.add( ast );
+for ( String propertyName : nodeProperties.keySet() ) {
+indentLine( parentsAndNode, pw );
+pw.println( propertyToString( propertyName, nodeProperties.get( propertyName ), ast ) );
+}
+}
+
+public LinkedHashMap<String, Object> createNodeProperties(AST ast) {
+return new LinkedHashMap<>();
+}
+
+public String propertyToString(String label, Object value, AST ast) {
+return String.format( "%s: %s", label, value );
+}
+
public static void appendEscapedMultibyteChars(String text, StringBuilder buf) {
char[] chars = text.toCharArray();
for ( char aChar : chars ) {
@ -0,0 +1,72 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.hql.internal.ast.util;

import java.util.Arrays;
import java.util.LinkedHashMap;

import org.hibernate.hql.internal.ast.tree.DotNode;
import org.hibernate.hql.internal.ast.tree.FromElement;
import org.hibernate.hql.internal.ast.tree.FromReferenceNode;
import org.hibernate.hql.internal.ast.tree.IdentNode;
import org.hibernate.hql.internal.ast.tree.SelectClause;
import org.hibernate.persister.entity.AbstractEntityPersister;
import org.hibernate.persister.entity.EntityPersister;

import antlr.collections.AST;

public class ASTReferencedTablesPrinter extends ASTPrinter {

	public ASTReferencedTablesPrinter(Class tokenTypeConstants) {
		super( tokenTypeConstants );
	}

	@Override
	public String nodeToString(AST ast) {
		if ( ast == null ) {
			return "{node:null}";
		}
		return ast.getClass().getSimpleName();
	}

	@Override
	public LinkedHashMap<String, Object> createNodeProperties(AST node) {
		LinkedHashMap<String, Object> props = new LinkedHashMap<>();
		if ( node instanceof FromReferenceNode ) {
			FromReferenceNode frn = (FromReferenceNode) node;
			FromElement fromElement = frn.getFromElement();
			EntityPersister entityPersister = fromElement != null ? fromElement.getEntityPersister() : null;
			String entityPersisterStr = entityPersister != null ? entityPersister.toString() : null;
			props.put( "persister", entityPersisterStr );
			String referencedTablesStr = Arrays.toString( frn.getReferencedTables() );
			props.put( "referencedTables", referencedTablesStr );
		}
		if ( node instanceof DotNode ) {
			DotNode dn = (DotNode) node;
			props.put( "path", dn.getPath() );
			props.put( "originalPropertyName", dn.getOriginalPropertyName() );
		}
		if ( node instanceof IdentNode ) {
			IdentNode in = (IdentNode) node;
			props.put( "originalText", in.getOriginalText() );
		}
		if ( node instanceof SelectClause ) {
			SelectClause sc = (SelectClause) node;
			for ( Object element : sc.getFromElementsForLoad() ) {
				FromElement fromElement = (FromElement) element;
				EntityPersister entityPersister = fromElement.getEntityPersister();
				if ( entityPersister != null && entityPersister instanceof AbstractEntityPersister ) {
					AbstractEntityPersister aep = (AbstractEntityPersister) entityPersister;
					String entityClass = aep.getMappedClass().getSimpleName();
					String tables = Arrays.toString( aep.getTableNames() );
					props.put( String.format( "referencedTables(entity %s)", entityClass ), tables );
				}
			}
		}
		return props;
	}
}

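Note: a printer like this is constructed once over the token-type constants class and then asked to render a tree on demand. A minimal usage sketch, reusing only names that appear in this commit (the real call site is in TokenPrinters and JoinProcessor below; showAsString is inherited from ASTPrinter):

	// hypothetical call site for illustration only
	ASTPrinter printer = new ASTReferencedTablesPrinter( SqlTokenTypes.class );
	LOG.debug( printer.showAsString( queryNode, "Tables referenced from query nodes:" ) );
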
@ -8,14 +8,17 @@ package org.hibernate.hql.internal.ast.util;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.hibernate.AssertionFailure;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.internal.JoinSequence;
import org.hibernate.engine.spi.LoadQueryInfluencers;

@ -24,6 +27,7 @@ import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.internal.ast.tree.DotNode;
import org.hibernate.hql.internal.ast.tree.FromClause;
import org.hibernate.hql.internal.ast.tree.FromElement;
import org.hibernate.hql.internal.ast.tree.FromReferenceNode;
import org.hibernate.hql.internal.ast.tree.ImpliedFromElement;
import org.hibernate.hql.internal.ast.tree.ParameterContainer;
import org.hibernate.hql.internal.ast.tree.QueryNode;

@ -33,11 +37,16 @@ import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.FilterImpl;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.param.DynamicFilterParameterSpecification;
import org.hibernate.persister.entity.AbstractEntityPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.sql.JoinFragment;
import org.hibernate.sql.JoinType;
import org.hibernate.type.Type;

import antlr.collections.AST;

/**
 * Performs the post-processing of the join information gathered during semantic analysis.
 * The join generating classes are complex; this encapsulates some of the JoinSequence-related

@ -94,9 +103,86 @@ public class JoinProcessor implements SqlTokenTypes {
		}
	}

	private <T extends AST> List<T> findAllNodes(AST node, Class<T> clazz) {
		ArrayList<T> found = new ArrayList<>();
		doFindAllNodes( node, clazz, found );
		return found;
	}

	private <T extends AST> void doFindAllNodes(AST node, Class<T> clazz, List<T> found) {
		if ( clazz.isAssignableFrom( node.getClass() ) ) {
			found.add( (T) node );
		}
		if ( node.getFirstChild() != null ) {
			doFindAllNodes( node.getFirstChild(), clazz, found );
		}
		if ( node.getNextSibling() != null ) {
			doFindAllNodes( node.getNextSibling(), clazz, found );
		}
	}

	private Set<String> findQueryReferencedTables(QueryNode query) {
		if ( !walker.getSessionFactoryHelper()
				.getFactory()
				.getSessionFactoryOptions()
				.isOmitJoinOfSuperclassTablesEnabled() ) {
			if ( LOG.isDebugEnabled() ) {
				LOG.debug( String.format(
						"Finding of query referenced tables is skipped because the feature is disabled. See %s",
						AvailableSettings.OMIT_JOIN_OF_SUPERCLASS_TABLES
				) );
			}
			return null;
		}

		if ( CollectionHelper.isNotEmpty( walker.getEnabledFilters() ) ) {
			LOG.debug( "Finding of query referenced tables is skipped because filters are enabled." );
			return null;
		}

		if ( LOG.isDebugEnabled() ) {
			LOG.debug( TokenPrinters.REFERENCED_TABLES_PRINTER.showAsString(
					query,
					"Tables referenced from query nodes:"
			) );
		}

		Set<String> result = new HashSet<>();

		// Find tables referenced by FromReferenceNodes
		List<FromReferenceNode> fromReferenceNodes = findAllNodes( query, FromReferenceNode.class );
		for ( FromReferenceNode node : fromReferenceNodes ) {
			String[] tables = node.getReferencedTables();
			if ( tables != null ) {
				for ( String table : tables ) {
					result.add( table );
				}
			}
		}

		// Find tables referenced by fromElementsForLoad
		if ( query.getSelectClause() != null ) {
			for ( Object element : query.getSelectClause().getFromElementsForLoad() ) {
				FromElement fromElement = (FromElement) element;
				EntityPersister entityPersister = fromElement.getEntityPersister();
				if ( entityPersister != null && entityPersister instanceof AbstractEntityPersister ) {
					AbstractEntityPersister aep = (AbstractEntityPersister) entityPersister;
					String[] tables = aep.getTableNames();
					for ( String table : tables ) {
						result.add( table );
					}
				}
			}
		}

		return result;
	}

	public void processJoins(QueryNode query) {
		final FromClause fromClause = query.getFromClause();

		Set<String> queryReferencedTables = findQueryReferencedTables( query );

		final List fromElements;
		if ( DotNode.useThetaStyleImplicitJoins ) {
			// for regression testing against output from the old parser...

@ -136,6 +222,7 @@ public class JoinProcessor implements SqlTokenTypes {
		while ( iter.hasNext() ) {
			final FromElement fromElement = (FromElement) iter.next();
			JoinSequence join = fromElement.getJoinSequence();
			join.setQueryReferencedTables( queryReferencedTables );
			join.setSelector(
					new JoinSequence.Selector() {
						public boolean includeSubclasses(String alias) {

@ -21,4 +21,6 @@ public interface TokenPrinters {

	ASTPrinter ORDERBY_FRAGMENT_PRINTER = new ASTPrinter( GeneratedOrderByFragmentRendererTokenTypes.class );

	ASTPrinter REFERENCED_TABLES_PRINTER = new ASTReferencedTablesPrinter( SqlTokenTypes.class );

}

@ -143,8 +143,9 @@ public class TableBasedUpdateHandlerImpl
					ps = session.getJdbcCoordinator().getStatementPreparer().prepareStatement( updates[i], false );
					if ( assignmentParameterSpecifications[i] != null ) {
						int position = 1; // jdbc params are 1-based
						for ( int x = 0; x < assignmentParameterSpecifications[i].length; x++ ) {
							position += assignmentParameterSpecifications[i][x].bind( ps, queryParameters, session, position );
						for ( ParameterSpecification assignmentParameterSpecification : assignmentParameterSpecifications[i] ) {
							position += assignmentParameterSpecification
									.bind( ps, queryParameters, session, position );
						}
						handleAddedParametersOnUpdate( ps, session, position );
					}

@ -112,8 +112,8 @@ public class CteValuesListUpdateHandlerImpl
					}
				}
				if ( assignmentParameterSpecifications[i] != null ) {
					for ( int x = 0; x < assignmentParameterSpecifications[i].length; x++ ) {
						position += assignmentParameterSpecifications[i][x]
					for ( ParameterSpecification assignmentParameterSpecification : assignmentParameterSpecifications[i] ) {
						position += assignmentParameterSpecification
								.bind( ps, queryParameters, session, position );
					}
				}

@ -103,8 +103,8 @@ public abstract class AbstractInlineIdsUpdateHandlerImpl
				.prepareStatement( update, false )) {
			int position = 1; // jdbc params are 1-based
			if ( assignmentParameterSpecifications[i] != null ) {
				for ( int x = 0; x < assignmentParameterSpecifications[i].length; x++ ) {
					position += assignmentParameterSpecifications[i][x]
				for ( ParameterSpecification assignmentParameterSpecification : assignmentParameterSpecifications[i] ) {
					position += assignmentParameterSpecification
							.bind( ps, queryParameters, session, position );
				}
			}

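Note: all three update-handler hunks above keep the same binding contract while switching to for-each loops: JDBC parameter indexes are 1-based, and each ParameterSpecification#bind writes its value(s) starting at the given position and returns the number of parameter slots it consumed, which the caller accumulates. A sketch of that contract, using hypothetical specifications:

	// sketch only; 'rangeSpec' and 'idSpec' are hypothetical ParameterSpecifications
	int position = 1;                                                       // JDBC params are 1-based
	position += rangeSpec.bind( ps, queryParameters, session, position );  // e.g. "BETWEEN ? AND ?" consumes 2 slots
	position += idSpec.bind( ps, queryParameters, session, position );     // therefore binds at slot 3
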
@ -32,20 +32,16 @@ import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
import org.hibernate.LockMode;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.SessionEventListener;
import org.hibernate.SessionException;
import org.hibernate.Transaction;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.cache.spi.CacheTransactionSynchronization;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.ResultSetMappingDefinition;
import org.hibernate.engine.internal.SessionEventListenerManagerImpl;
import org.hibernate.engine.jdbc.LobCreationContext;
import org.hibernate.engine.jdbc.LobCreator;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider;
import org.hibernate.engine.jdbc.internal.JdbcCoordinatorImpl;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcServices;

@ -116,6 +112,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

	private transient SessionFactoryImpl factory;
	private final String tenantIdentifier;
	protected transient FastSessionServices fastSessionServices;
	private UUID sessionIdentifier;

	private transient JdbcConnectionAccess jdbcConnectionAccess;

@ -138,12 +135,10 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

	protected boolean closed;
	protected boolean waitingForAutoClose;
	private transient boolean disallowOutOfTransactionUpdateOperations;

	// transient & non-final for Serialization purposes - ugh
	private transient SessionEventListenerManagerImpl sessionEventsManager = new SessionEventListenerManagerImpl();
	private transient SessionEventListenerManagerImpl sessionEventsManager;
	private transient EntityNameResolver entityNameResolver;
	private transient Boolean useStreamForLobBinding;

	private Integer jdbcBatchSize;

@ -154,8 +149,9 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

	public AbstractSharedSessionContract(SessionFactoryImpl factory, SessionCreationOptions options) {
		this.factory = factory;
		this.fastSessionServices = factory.getFastSessionServices();
		this.cacheTransactionSync = factory.getCache().getRegionFactory().createTransactionContext( this );
		this.disallowOutOfTransactionUpdateOperations = !factory.getSessionFactoryOptions().isAllowOutOfTransactionUpdateOperations();

		this.flushMode = options.getInitialSessionFlushMode();

@ -173,9 +169,16 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

		this.interceptor = interpret( options.getInterceptor() );
		this.jdbcTimeZone = options.getJdbcTimeZone();
		final List<SessionEventListener> customSessionEventListener = options.getCustomSessionEventListener();
		if ( customSessionEventListener == null ) {
			sessionEventsManager = new SessionEventListenerManagerImpl( fastSessionServices.defaultSessionEventListeners.buildBaseline() );
		}
		else {
			sessionEventsManager = new SessionEventListenerManagerImpl( customSessionEventListener.toArray( new SessionEventListener[0] ) );
		}

		final StatementInspector statementInspector = interpret( options.getStatementInspector() );
		this.jdbcSessionContext = new JdbcSessionContextImpl( this, statementInspector );
		this.jdbcSessionContext = new JdbcSessionContextImpl( this, statementInspector, fastSessionServices );

		this.entityNameResolver = new CoordinatingEntityNameResolver( factory, interceptor );

@ -212,11 +215,8 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
		else {
			this.isTransactionCoordinatorShared = false;
			this.autoJoinTransactions = options.shouldAutoJoinTransactions();

			this.jdbcCoordinator = new JdbcCoordinatorImpl( options.getConnection(), this );
			this.transactionCoordinator = factory.getServiceRegistry()
					.getService( TransactionCoordinatorBuilder.class )
					.buildTransactionCoordinator( jdbcCoordinator, this );
			this.jdbcCoordinator = new JdbcCoordinatorImpl( options.getConnection(), this, fastSessionServices.jdbcServices );
			this.transactionCoordinator = fastSessionServices.transactionCoordinatorBuilder.buildTransactionCoordinator( jdbcCoordinator, this );
		}
	}

@ -405,14 +405,14 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

	@Override
	public void checkTransactionNeededForUpdateOperation(String exceptionMessage) {
		if ( disallowOutOfTransactionUpdateOperations && !isTransactionInProgress() ) {
		if ( fastSessionServices.disallowOutOfTransactionUpdateOperations && !isTransactionInProgress() ) {
			throw new TransactionRequiredException( exceptionMessage );
		}
	}

	@Override
	public Transaction getTransaction() throws HibernateException {
		if ( !isTransactionAccessible() ) {
		if ( ! fastSessionServices.isJtaTransactionAccessible ) {
			throw new IllegalStateException(
					"Transaction is not accessible when using JTA with JPA-compliant transaction access enabled"
			);

@ -420,17 +420,6 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
		return accessTransaction();
	}

	protected boolean isTransactionAccessible() {
		// JPA requires that access not be provided to the transaction when using JTA.
		// This is overridden when SessionFactoryOptions isJtaTransactionAccessEnabled() is true.
		if ( getFactory().getSessionFactoryOptions().getJpaCompliance().isJpaTransactionComplianceEnabled() &&
				getTransactionCoordinator().getTransactionCoordinatorBuilder().isJta() &&
				!getFactory().getSessionFactoryOptions().isJtaTransactionAccessEnabled() ) {
			return false;
		}
		return true;
	}

	@Override
	public Transaction accessTransaction() {
		if ( this.currentHibernateTransaction == null ) {

@ -439,7 +428,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
					this
			);
		}
		if ( !isClosed() || (waitingForAutoClose && factory.isOpen()) ) {
		if ( !isClosed() || ( waitingForAutoClose && factory.isOpen() ) ) {
			getTransactionCoordinator().pulse();
		}
		return this.currentHibernateTransaction;

@ -512,17 +501,17 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
	public JdbcConnectionAccess getJdbcConnectionAccess() {
		// See class-level JavaDocs for a discussion of the concurrent-access safety of this method
		if ( jdbcConnectionAccess == null ) {
			if ( !factory.getSettings().getMultiTenancyStrategy().requiresMultiTenantConnectionProvider() ) {
			if ( ! fastSessionServices.requiresMultiTenantConnectionProvider ) {
				jdbcConnectionAccess = new NonContextualJdbcConnectionAccess(
						getEventListenerManager(),
						factory.getServiceRegistry().getService( ConnectionProvider.class )
						fastSessionServices.connectionProvider
				);
			}
			else {
				jdbcConnectionAccess = new ContextualJdbcConnectionAccess(
						getTenantIdentifier(),
						getEventListenerManager(),
						factory.getServiceRegistry().getService( MultiTenantConnectionProvider.class )
						fastSessionServices.multiTenantConnectionProvider
				);
			}
		}

@ -536,11 +525,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

	@Override
	public boolean useStreamForLobBinding() {
		if ( useStreamForLobBinding == null ) {
			useStreamForLobBinding = Environment.useStreamsForBinary()
					|| getJdbcServices().getJdbcEnvironment().getDialect().useInputStreamToInsertBlob();
		}
		return useStreamForLobBinding;
		return fastSessionServices.useStreamForLobBinding;
	}

	@Override

@ -567,13 +552,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

	@Override
	public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) {
		if ( !sqlTypeDescriptor.canBeRemapped() ) {
			return sqlTypeDescriptor;
		}

		final Dialect dialect = getJdbcServices().getJdbcEnvironment().getDialect();
		final SqlTypeDescriptor remapped = dialect.remapSqlTypeDescriptor( sqlTypeDescriptor );
		return remapped == null ? sqlTypeDescriptor : remapped;
		return fastSessionServices.remapSqlTypeDescriptor( sqlTypeDescriptor );
	}

	@Override

@ -928,7 +907,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont

	@SuppressWarnings({"WeakerAccess", "unchecked"})
	protected <T> NativeQueryImplementor createNativeQuery(NamedSQLQueryDefinition queryDefinition, Class<T> resultType) {
		if ( resultType != null && !Tuple.class.equals(resultType)) {
		if ( resultType != null && !Tuple.class.equals( resultType ) ) {
			resultClassChecking( resultType, queryDefinition );
		}

@ -937,8 +916,8 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
				this,
				factory.getQueryPlanCache().getSQLParameterMetadata( queryDefinition.getQueryString(), false )
		);
		if (Tuple.class.equals(resultType)) {
			query.setResultTransformer(new NativeQueryTupleTransformer());
		if ( Tuple.class.equals( resultType ) ) {
			query.setResultTransformer( new NativeQueryTupleTransformer() );
		}
		query.setHibernateFlushMode( queryDefinition.getFlushMode() );
		query.setComment( queryDefinition.getComment() != null ? queryDefinition.getComment() : queryDefinition.getName() );

@ -976,7 +955,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
			final Class<?> actualReturnedClass;
			final String entityClassName = ( (NativeSQLQueryRootReturn) nativeSQLQueryReturn ).getReturnEntityName();
			try {
				actualReturnedClass = getFactory().getServiceRegistry().getService( ClassLoaderService.class ).classForName( entityClassName );
				actualReturnedClass = fastSessionServices.classLoaderService.classForName( entityClassName );
			}
			catch ( ClassLoadingException e ) {
				throw new AssertionFailure(

@ -1219,14 +1198,15 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
		// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		// Step 1 :: read back non-transient state...
		ois.defaultReadObject();
		sessionEventsManager = new SessionEventListenerManagerImpl();

		// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		// Step 2 :: read back transient state...
		// -- see above

		factory = SessionFactoryImpl.deserialize( ois );
		jdbcSessionContext = new JdbcSessionContextImpl( this, (StatementInspector) ois.readObject() );
		fastSessionServices = factory.getFastSessionServices();
		sessionEventsManager = new SessionEventListenerManagerImpl( fastSessionServices.defaultSessionEventListeners.buildBaseline() );
		jdbcSessionContext = new JdbcSessionContextImpl( this, (StatementInspector) ois.readObject(), fastSessionServices );
		jdbcCoordinator = JdbcCoordinatorImpl.deserialize( ois, this );

		cacheTransactionSync = factory.getCache().getRegionFactory().createTransactionContext( this );

@ -1236,7 +1216,6 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
				.buildTransactionCoordinator( jdbcCoordinator, this );

		entityNameResolver = new CoordinatingEntityNameResolver( factory, interceptor );
		this.disallowOutOfTransactionUpdateOperations = !getFactory().getSessionFactoryOptions().isAllowOutOfTransactionUpdateOperations();
	}

}

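Note: the deserialization hunk above follows the standard Java pattern for classes with transient state: defaultReadObject() restores the serialized fields, then the transient collaborators are rebuilt by hand. A self-contained sketch of the shape (hypothetical class, not Hibernate API):

	import java.io.IOException;
	import java.io.ObjectInputStream;
	import java.io.Serializable;

	final class SessionLikeSketch implements Serializable {
		private String tenantIdentifier;        // non-transient: restored by defaultReadObject()
		private transient Object eventsManager; // transient: must be rebuilt after reading

		private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
			ois.defaultReadObject();            // Step 1: read back non-transient state
			this.eventsManager = new Object();  // Step 2: rebuild transient state
		}
	}
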
@ -15,6 +15,7 @@ import java.sql.SQLException;
import java.sql.SQLWarning;
import java.util.Hashtable;
import java.util.Properties;
import java.util.ServiceConfigurationError;
import java.util.Set;
import javax.naming.NameNotFoundException;
import javax.naming.NamingException;

@ -1871,4 +1872,8 @@ public interface CoreMessageLogger extends BasicLogger {
	@Message(value = "Multiple configuration properties defined to create schema. Choose at most one among 'javax.persistence.create-database-schemas', 'hibernate.hbm2ddl.create_namespaces', 'hibernate.hbm2dll.create_namespaces' (this last being deprecated).", id = 504)
	void multipleSchemaCreationSettingsDefined();

	@LogMessage(level = WARN)
	@Message(value = "Ignoring ServiceConfigurationError caught while trying to instantiate service '%s'.", id = 505)
	void ignoringServiceConfigurationError(Class<?> serviceContract, @Cause ServiceConfigurationError error);

}

@ -0,0 +1,280 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.internal;

import org.hibernate.CacheMode;
import org.hibernate.FlushMode;
import org.hibernate.LockOptions;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.SessionFactoryOptions;
import org.hibernate.cfg.BaselineSessionEventsListenerBuilder;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.event.service.spi.EventListenerGroup;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.AutoFlushEventListener;
import org.hibernate.event.spi.ClearEventListener;
import org.hibernate.event.spi.DeleteEventListener;
import org.hibernate.event.spi.DirtyCheckEventListener;
import org.hibernate.event.spi.EventType;
import org.hibernate.event.spi.EvictEventListener;
import org.hibernate.event.spi.FlushEventListener;
import org.hibernate.event.spi.InitializeCollectionEventListener;
import org.hibernate.event.spi.LoadEventListener;
import org.hibernate.event.spi.LockEventListener;
import org.hibernate.event.spi.MergeEventListener;
import org.hibernate.event.spi.PersistEventListener;
import org.hibernate.event.spi.RefreshEventListener;
import org.hibernate.event.spi.ReplicateEventListener;
import org.hibernate.event.spi.ResolveNaturalIdEventListener;
import org.hibernate.event.spi.SaveOrUpdateEventListener;
import org.hibernate.jpa.AvailableSettings;
import org.hibernate.jpa.QueryHints;
import org.hibernate.jpa.internal.util.CacheModeHelper;
import org.hibernate.jpa.internal.util.LockOptionsHelper;
import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import javax.persistence.CacheRetrieveMode;
import javax.persistence.CacheStoreMode;
import javax.persistence.PessimisticLockScope;

import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_SCOPE;
import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_TIMEOUT;
import static org.hibernate.cfg.AvailableSettings.JPA_SHARED_CACHE_RETRIEVE_MODE;
import static org.hibernate.cfg.AvailableSettings.JPA_SHARED_CACHE_STORE_MODE;

/**
 * Internal component.
 *
 * Collects any components that any Session implementation will likely need
 * for faster access and reduced allocations.
 * Conceptually this acts as an immutable caching intermediary between Session
 * and SessionFactory.
 * Designed to be immutable, and shared across Session instances.
 *
 * Assumed to be created infrequently, possibly only once per SessionFactory.
 *
 * If the Session needs to retrieve (or compute) anything from the SessionFactory,
 * and this computation would result in the same outcome for any Session created on
 * this same SessionFactory, then it belongs in a final field of this class.
 *
 * Finally, consider also limiting the size of each Session: some fields could be good
 * candidates to be replaced with access via this object.
 *
 * @author Sanne Grinovero
 */
final class FastSessionServices {

	/**
	 * Default session properties
	 */
	final Map<String, Object> defaultSessionProperties;

	// All session events need to be iterated frequently:
	final EventListenerGroup<AutoFlushEventListener> eventListenerGroup_AUTO_FLUSH;
	final EventListenerGroup<ClearEventListener> eventListenerGroup_CLEAR;
	final EventListenerGroup<DeleteEventListener> eventListenerGroup_DELETE;
	final EventListenerGroup<DirtyCheckEventListener> eventListenerGroup_DIRTY_CHECK;
	final EventListenerGroup<EvictEventListener> eventListenerGroup_EVICT;
	final EventListenerGroup<FlushEventListener> eventListenerGroup_FLUSH;
	final EventListenerGroup<InitializeCollectionEventListener> eventListenerGroup_INIT_COLLECTION;
	final EventListenerGroup<LoadEventListener> eventListenerGroup_LOAD;
	final EventListenerGroup<LockEventListener> eventListenerGroup_LOCK;
	final EventListenerGroup<MergeEventListener> eventListenerGroup_MERGE;
	final EventListenerGroup<PersistEventListener> eventListenerGroup_PERSIST;
	final EventListenerGroup<PersistEventListener> eventListenerGroup_PERSIST_ONFLUSH;
	final EventListenerGroup<RefreshEventListener> eventListenerGroup_REFRESH;
	final EventListenerGroup<ReplicateEventListener> eventListenerGroup_REPLICATE;
	final EventListenerGroup<ResolveNaturalIdEventListener> eventListenerGroup_RESOLVE_NATURAL_ID;
	final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_SAVE;
	final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_SAVE_UPDATE;
	final EventListenerGroup<SaveOrUpdateEventListener> eventListenerGroup_UPDATE;

	//Intentionally Package private:
	final boolean disallowOutOfTransactionUpdateOperations;
	final boolean useStreamForLobBinding;
	final boolean requiresMultiTenantConnectionProvider;
	final ConnectionProvider connectionProvider;
	final MultiTenantConnectionProvider multiTenantConnectionProvider;
	final ClassLoaderService classLoaderService;
	final TransactionCoordinatorBuilder transactionCoordinatorBuilder;
	final JdbcServices jdbcServices;
	final boolean isJtaTransactionAccessible;
	final CacheMode initialSessionCacheMode;
	final boolean discardOnClose;
	final BaselineSessionEventsListenerBuilder defaultSessionEventListeners;
	final LockOptions defaultLockOptions;

	//Private fields:
	private final Dialect dialect;
	private final CacheStoreMode defaultCacheStoreMode;
	private final CacheRetrieveMode defaultCacheRetrieveMode;
	private final ConnectionObserverStatsBridge defaultJdbcObservers;

	FastSessionServices(SessionFactoryImpl sf) {
		Objects.requireNonNull( sf );
		final ServiceRegistryImplementor sr = sf.getServiceRegistry();
		final JdbcServices jdbcServices = sf.getJdbcServices();
		final SessionFactoryOptions sessionFactoryOptions = sf.getSessionFactoryOptions();

		// Pre-compute all iterators on Event listeners:
		final EventListenerRegistry eventListenerRegistry = sr.getService( EventListenerRegistry.class );
		this.eventListenerGroup_AUTO_FLUSH = listeners( eventListenerRegistry, EventType.AUTO_FLUSH );
		this.eventListenerGroup_CLEAR = listeners( eventListenerRegistry, EventType.CLEAR );
		this.eventListenerGroup_DELETE = listeners( eventListenerRegistry, EventType.DELETE );
		this.eventListenerGroup_DIRTY_CHECK = listeners( eventListenerRegistry, EventType.DIRTY_CHECK );
		this.eventListenerGroup_EVICT = listeners( eventListenerRegistry, EventType.EVICT );
		this.eventListenerGroup_FLUSH = listeners( eventListenerRegistry, EventType.FLUSH );
		this.eventListenerGroup_INIT_COLLECTION = listeners( eventListenerRegistry, EventType.INIT_COLLECTION );
		this.eventListenerGroup_LOAD = listeners( eventListenerRegistry, EventType.LOAD );
		this.eventListenerGroup_LOCK = listeners( eventListenerRegistry, EventType.LOCK );
		this.eventListenerGroup_MERGE = listeners( eventListenerRegistry, EventType.MERGE );
		this.eventListenerGroup_PERSIST = listeners( eventListenerRegistry, EventType.PERSIST );
		this.eventListenerGroup_PERSIST_ONFLUSH = listeners( eventListenerRegistry, EventType.PERSIST_ONFLUSH );
		this.eventListenerGroup_REFRESH = listeners( eventListenerRegistry, EventType.REFRESH );
		this.eventListenerGroup_REPLICATE = listeners( eventListenerRegistry, EventType.REPLICATE );
		this.eventListenerGroup_RESOLVE_NATURAL_ID = listeners( eventListenerRegistry, EventType.RESOLVE_NATURAL_ID );
		this.eventListenerGroup_SAVE = listeners( eventListenerRegistry, EventType.SAVE );
		this.eventListenerGroup_SAVE_UPDATE = listeners( eventListenerRegistry, EventType.SAVE_UPDATE );
		this.eventListenerGroup_UPDATE = listeners( eventListenerRegistry, EventType.UPDATE );

		//Other highly useful constants:
		this.dialect = jdbcServices.getJdbcEnvironment().getDialect();
		this.disallowOutOfTransactionUpdateOperations = !sessionFactoryOptions.isAllowOutOfTransactionUpdateOperations();
		this.useStreamForLobBinding = Environment.useStreamsForBinary() || dialect.useInputStreamToInsertBlob();
		this.requiresMultiTenantConnectionProvider = sf.getSettings().getMultiTenancyStrategy().requiresMultiTenantConnectionProvider();

		//Some "hot" services:
		this.connectionProvider = requiresMultiTenantConnectionProvider ? null : sr.getService( ConnectionProvider.class );
		this.multiTenantConnectionProvider = requiresMultiTenantConnectionProvider ? sr.getService( MultiTenantConnectionProvider.class ) : null;
		this.classLoaderService = sr.getService( ClassLoaderService.class );
		this.transactionCoordinatorBuilder = sr.getService( TransactionCoordinatorBuilder.class );
		this.jdbcServices = sr.getService( JdbcServices.class );

		this.isJtaTransactionAccessible = isTransactionAccessible( sf, transactionCoordinatorBuilder );

		this.defaultSessionProperties = initializeDefaultSessionProperties( sf );
		this.defaultCacheStoreMode = determineCacheStoreMode( defaultSessionProperties );
		this.defaultCacheRetrieveMode = determineCacheRetrieveMode( defaultSessionProperties );
		this.initialSessionCacheMode = CacheModeHelper.interpretCacheMode( defaultCacheStoreMode, defaultCacheRetrieveMode );
		this.discardOnClose = sessionFactoryOptions.isReleaseResourcesOnCloseEnabled();
		this.defaultJdbcObservers = new ConnectionObserverStatsBridge( sf );
		this.defaultSessionEventListeners = sessionFactoryOptions.getBaselineSessionEventsListenerBuilder();
		this.defaultLockOptions = initializeDefaultLockOptions( defaultSessionProperties );
	}

	private static LockOptions initializeDefaultLockOptions(final Map<String, Object> defaultSessionProperties) {
		LockOptions def = new LockOptions();
		LockOptionsHelper.applyPropertiesToLockOptions( defaultSessionProperties, () -> def );
		return def;
	}

	private static <T> EventListenerGroup<T> listeners(EventListenerRegistry elr, EventType<T> type) {
		return elr.getEventListenerGroup( type );
	}

	SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) {
		if ( !sqlTypeDescriptor.canBeRemapped() ) {
			return sqlTypeDescriptor;
		}

		final SqlTypeDescriptor remapped = dialect.remapSqlTypeDescriptor( sqlTypeDescriptor );
		return remapped == null ? sqlTypeDescriptor : remapped;
	}

	private static boolean isTransactionAccessible(SessionFactoryImpl sf, TransactionCoordinatorBuilder transactionCoordinatorBuilder) {
		// JPA requires that access not be provided to the transaction when using JTA.
		// This is overridden when SessionFactoryOptions isJtaTransactionAccessEnabled() is true.
		if ( sf.getSessionFactoryOptions().getJpaCompliance().isJpaTransactionComplianceEnabled() &&
				transactionCoordinatorBuilder.isJta() &&
				!sf.getSessionFactoryOptions().isJtaTransactionAccessEnabled() ) {
			return false;
		}
		return true;
	}

	private static Map<String, Object> initializeDefaultSessionProperties(SessionFactoryImpl sf) {
		HashMap<String,Object> p = new HashMap<>();

		//Static defaults:
		p.putIfAbsent( AvailableSettings.FLUSH_MODE, FlushMode.AUTO.name() );
		p.putIfAbsent( JPA_LOCK_SCOPE, PessimisticLockScope.EXTENDED.name() );
		p.putIfAbsent( JPA_LOCK_TIMEOUT, LockOptions.WAIT_FOREVER );
		p.putIfAbsent( JPA_SHARED_CACHE_RETRIEVE_MODE, CacheModeHelper.DEFAULT_RETRIEVE_MODE );
		p.putIfAbsent( JPA_SHARED_CACHE_STORE_MODE, CacheModeHelper.DEFAULT_STORE_MODE );

		//Defaults defined by SessionFactory configuration:
		final String[] ENTITY_MANAGER_SPECIFIC_PROPERTIES = {
				JPA_LOCK_SCOPE,
				JPA_LOCK_TIMEOUT,
				AvailableSettings.FLUSH_MODE,
				JPA_SHARED_CACHE_RETRIEVE_MODE,
				JPA_SHARED_CACHE_STORE_MODE,
				QueryHints.SPEC_HINT_TIMEOUT
		};
		final Map<String, Object> properties = sf.getProperties();
		for ( String key : ENTITY_MANAGER_SPECIFIC_PROPERTIES ) {
			if ( properties.containsKey( key ) ) {
				p.put( key, properties.get( key ) );
			}
		}
		return Collections.unmodifiableMap( p );
	}

	/**
	 * @param properties the Session properties
	 * @return either the CacheStoreMode as defined in the Session specific properties, or as defined in the
	 *  properties shared across all sessions (the defaults).
	 */
	CacheStoreMode getCacheStoreMode(final Map<String, Object> properties) {
		if ( properties == null ) {
			return this.defaultCacheStoreMode;
		}
		else {
			return determineCacheStoreMode( properties );
		}
	}

	/**
	 * @param properties the Session properties
	 * @return either the CacheRetrieveMode as defined in the Session specific properties, or as defined in the
	 *  properties shared across all sessions (the defaults).
	 */
	CacheRetrieveMode getCacheRetrieveMode(Map<String, Object> properties) {
		if ( properties == null ) {
			return this.defaultCacheRetrieveMode;
		}
		else {
			return determineCacheRetrieveMode( properties );
		}
	}

	private static CacheRetrieveMode determineCacheRetrieveMode(Map<String, Object> settings) {
		return ( CacheRetrieveMode ) settings.get( JPA_SHARED_CACHE_RETRIEVE_MODE );
	}

	private static CacheStoreMode determineCacheStoreMode(Map<String, Object> settings) {
		return ( CacheStoreMode ) settings.get( JPA_SHARED_CACHE_STORE_MODE );
	}

	public ConnectionObserverStatsBridge getDefaultJdbcObserver() {
		return defaultJdbcObservers;
	}

}

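Note: per the class javadoc above, anything a Session would otherwise recompute or re-resolve from the SessionFactory on a hot path is promoted to a final field of this new class. A generic, self-contained sketch of the pattern (hypothetical names, not Hibernate API):

	// computed once per factory; every session then reads plain final fields
	final class CachedFactoryState {
		final boolean featureEnabled;  // replaces an options lookup on each check
		final String dialectName;      // replaces a service-registry lookup per use

		CachedFactoryState( boolean featureEnabled, String dialectName ) {
			this.featureEnabled = featureEnabled;
			this.dialectName = dialectName;
		}
	}
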
@ -105,7 +105,7 @@ public final class IteratorImpl implements HibernateIterator {
		try {
			boolean isHolder = holderInstantiator.isRequired();

			LOG.debugf( "Assembling results" );
			LOG.debug( "Assembling results" );
			if ( single && !isHolder ) {
				currentResult = types[0].nullSafeGet( rs, names[0], session, null );
			}

@ -124,7 +124,7 @@ public final class IteratorImpl implements HibernateIterator {
			}

			postNext();
			LOG.debugf( "Returning current results" );
			LOG.debug( "Returning current results" );
			return currentResult;
		}
		catch (SQLException sqle) {

@ -7,81 +7,73 @@
package org.hibernate.internal;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.List;

import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.spi.SessionEventListenerManager;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.resource.jdbc.spi.JdbcObserver;

/**
 * @author Steve Ebersole
 */
public class JdbcObserverImpl implements JdbcObserver {
	private final SharedSessionContractImplementor session;
	private final transient List<ConnectionObserver> observers;
public final class JdbcObserverImpl implements JdbcObserver {

	public JdbcObserverImpl(SharedSessionContractImplementor session) {
	private final ConnectionObserverStatsBridge observer;
	private final SessionEventListenerManager eventListenerManager;
	private final SharedSessionContractImplementor session;

	public JdbcObserverImpl(SharedSessionContractImplementor session, FastSessionServices fastSessionServices) {
		this.session = session;
		this.observers = new ArrayList<>();
		this.observers.add( new ConnectionObserverStatsBridge( session.getFactory() ) );
		this.observer = fastSessionServices.getDefaultJdbcObserver();
		this.eventListenerManager = session.getEventListenerManager();
	}

	@Override
	public void jdbcConnectionAcquisitionStart() {

	}

	@Override
	public void jdbcConnectionAcquisitionEnd(Connection connection) {
		for ( ConnectionObserver observer : observers ) {
			observer.physicalConnectionObtained( connection );
		}
		observer.physicalConnectionObtained( connection );
	}

	@Override
	public void jdbcConnectionReleaseStart() {

	}

	@Override
	public void jdbcConnectionReleaseEnd() {
		for ( ConnectionObserver observer : observers ) {
			observer.physicalConnectionReleased();
		}
		observer.physicalConnectionReleased();
	}

	@Override
	public void jdbcPrepareStatementStart() {
		session.getEventListenerManager().jdbcPrepareStatementStart();
		eventListenerManager.jdbcPrepareStatementStart();
	}

	@Override
	public void jdbcPrepareStatementEnd() {
		for ( ConnectionObserver observer : observers ) {
			observer.statementPrepared();
		}
		session.getEventListenerManager().jdbcPrepareStatementEnd();
		observer.statementPrepared();
		eventListenerManager.jdbcPrepareStatementEnd();
	}

	@Override
	public void jdbcExecuteStatementStart() {
		session.getEventListenerManager().jdbcExecuteStatementStart();
		eventListenerManager.jdbcExecuteStatementStart();
	}

	@Override
	public void jdbcExecuteStatementEnd() {
		session.getEventListenerManager().jdbcExecuteStatementEnd();
		eventListenerManager.jdbcExecuteStatementEnd();
	}

	@Override
	public void jdbcExecuteBatchStart() {
		session.getEventListenerManager().jdbcExecuteBatchStart();
		eventListenerManager.jdbcExecuteBatchStart();
	}

	@Override
	public void jdbcExecuteBatchEnd() {
		session.getEventListenerManager().jdbcExecuteBatchEnd();
		eventListenerManager.jdbcExecuteBatchEnd();
	}

	@Override

@ -28,12 +28,15 @@ public class JdbcSessionContextImpl implements JdbcSessionContext {
	private final transient ServiceRegistry serviceRegistry;
	private final transient JdbcObserver jdbcObserver;

	public JdbcSessionContextImpl(SharedSessionContractImplementor session, StatementInspector statementInspector) {
	public JdbcSessionContextImpl(
			SharedSessionContractImplementor session,
			StatementInspector statementInspector,
			FastSessionServices fastSessionServices) {
		this.sessionFactory = session.getFactory();
		this.statementInspector = statementInspector;
		this.connectionHandlingMode = settings().getPhysicalConnectionHandlingMode();
		this.serviceRegistry = sessionFactory.getServiceRegistry();
		this.jdbcObserver = new JdbcObserverImpl( session );
		this.jdbcObserver = new JdbcObserverImpl( session, fastSessionServices );

		if ( this.statementInspector == null ) {
			throw new IllegalArgumentException( "StatementInspector cannot be null" );

@ -9,6 +9,7 @@ package org.hibernate.internal;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Objects;

import org.hibernate.SessionEventListener;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;

@ -24,6 +25,8 @@ public class NonContextualJdbcConnectionAccess implements JdbcConnectionAccess,
	public NonContextualJdbcConnectionAccess(
			SessionEventListener listener,
			ConnectionProvider connectionProvider) {
		Objects.requireNonNull( listener );
		Objects.requireNonNull( connectionProvider );
		this.listener = listener;
		this.connectionProvider = connectionProvider;
	}

@ -7,10 +7,12 @@
package org.hibernate.internal;

import java.sql.Connection;
import java.util.List;
import java.util.TimeZone;

import org.hibernate.FlushMode;
import org.hibernate.Interceptor;
import org.hibernate.SessionEventListener;
import org.hibernate.engine.spi.SessionOwner;
import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
import org.hibernate.resource.jdbc.spi.StatementInspector;

@ -45,6 +47,12 @@ public interface SessionCreationOptions {

	TimeZone getJdbcTimeZone();

	/**
	 * @return the full list of SessionEventListener if this was customized,
	 * or null if this Session is being created with the default list.
	 */
	List<SessionEventListener> getCustomSessionEventListener();

	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// deprecations

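Note: the null-means-default contract documented above is what lets the common case skip building listener lists entirely; the consuming side (see the AbstractSharedSessionContract constructor earlier in this commit) distinguishes the two cases roughly as in this abbreviated sketch:

	// names taken from this commit; abbreviated for illustration
	final List<SessionEventListener> custom = options.getCustomSessionEventListener();
	final SessionEventListenerManagerImpl manager = ( custom == null )
			? new SessionEventListenerManagerImpl( fastSessionServices.defaultSessionEventListeners.buildBaseline() )
			: new SessionEventListenerManagerImpl( custom.toArray( new SessionEventListener[0] ) );
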
@ -68,6 +68,7 @@ import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider;
import org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jndi.spi.JndiService;
import org.hibernate.engine.profile.Association;

@ -196,6 +197,10 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {

	private final transient TypeHelper typeHelper;

	private final transient FastSessionServices fastSessionServices;
	private final transient SessionBuilder defaultSessionOpenOptions;
	private final transient SessionBuilder temporarySessionOpenOptions;

	public SessionFactoryImpl(
			final MetadataImplementor metadata,
			SessionFactoryOptions options) {

@ -374,6 +379,13 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
			fetchProfiles.put( fetchProfile.getName(), fetchProfile );
		}

		this.defaultSessionOpenOptions = withOptions();
		this.temporarySessionOpenOptions = withOptions()
				.autoClose( false )
				.flushMode( FlushMode.MANUAL )
				.connectionHandlingMode( PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_STATEMENT );
		this.fastSessionServices = new FastSessionServices( this );

		this.observer.sessionFactoryCreated( this );

		SessionFactoryRegistry.INSTANCE.addSessionFactory(

@ -458,41 +470,22 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
	}

	private JdbcConnectionAccess buildLocalConnectionAccess() {
		return new JdbcConnectionAccess() {
			@Override
			public Connection obtainConnection() throws SQLException {
				return !settings.getMultiTenancyStrategy().requiresMultiTenantConnectionProvider()
						? serviceRegistry.getService( ConnectionProvider.class ).getConnection()
						: serviceRegistry.getService( MultiTenantConnectionProvider.class ).getAnyConnection();
			}

			@Override
			public void releaseConnection(Connection connection) throws SQLException {
				if ( !settings.getMultiTenancyStrategy().requiresMultiTenantConnectionProvider() ) {
					serviceRegistry.getService( ConnectionProvider.class ).closeConnection( connection );
				}
				else {
					serviceRegistry.getService( MultiTenantConnectionProvider.class ).releaseAnyConnection( connection );
				}
			}

			@Override
			public boolean supportsAggressiveRelease() {
				return false;
			}
		};
		if ( settings.getMultiTenancyStrategy().requiresMultiTenantConnectionProvider() ) {
			final MultiTenantConnectionProvider mTenantConnectionProvider = serviceRegistry.getService( MultiTenantConnectionProvider.class );
			return new JdbcEnvironmentInitiator.MultiTenantConnectionProviderJdbcConnectionAccess( mTenantConnectionProvider );
		}
		else {
			final ConnectionProvider connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
			return new JdbcEnvironmentInitiator.ConnectionProviderJdbcConnectionAccess( connectionProvider );
		}
	}

	public Session openSession() throws HibernateException {
		return withOptions().openSession();
		return this.defaultSessionOpenOptions.openSession();
	}

	public Session openTemporarySession() throws HibernateException {
		return withOptions()
				.autoClose( false )
				.flushMode( FlushMode.MANUAL )
				.connectionHandlingMode( PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_STATEMENT )
				.openSession();
		return this.temporarySessionOpenOptions.openSession();
	}

	public Session getCurrentSession() throws HibernateException {

@ -1166,11 +1159,13 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
		private TimeZone jdbcTimeZone;
		private boolean queryParametersValidationEnabled;

		// Lazy: defaults can be built by invoking the builder in fastSessionServices.defaultSessionEventListeners
		// (Need a fresh build for each Session as the listener instances can't be reused across sessions)
		// Only initialized if the builder is overriding the default.
		private List<SessionEventListener> listeners;

		//todo : expose setting
		private SessionOwnerBehavior sessionOwnerBehavior = SessionOwnerBehavior.LEGACY_NATIVE;
		private PersistenceUnitTransactionType persistenceUnitTransactionType;

		SessionBuilderImpl(SessionFactoryImpl sessionFactory) {
			this.sessionFactory = sessionFactory;

@ -1189,9 +1184,7 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
				tenantIdentifier = currentTenantIdentifierResolver.resolveCurrentTenantIdentifier();
			}
			this.jdbcTimeZone = sessionFactoryOptions.getJdbcTimeZone();

			listeners = sessionFactoryOptions.getBaselineSessionEventsListenerBuilder().buildBaselineList();
			queryParametersValidationEnabled = sessionFactoryOptions.isQueryParametersValidationEnabled();
			this.queryParametersValidationEnabled = sessionFactoryOptions.isQueryParametersValidationEnabled();
		}

@ -1279,20 +1272,18 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
			return jdbcTimeZone;
		}

		@Override
		public List<SessionEventListener> getCustomSessionEventListener() {
			return listeners;
		}

		// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		// SessionBuilder

		@Override
		public Session openSession() {
			log.tracef( "Opening Hibernate Session. tenant=%s", tenantIdentifier );
			final SessionImpl session = new SessionImpl( sessionFactory, this );

			final SessionEventListenerManager eventListenerManager = session.getEventListenerManager();
			for ( SessionEventListener listener : listeners ) {
				eventListenerManager.addListener( listener );
			}

			return session;
			return new SessionImpl( sessionFactory, this );
		}

		@Override

@ -1388,6 +1379,11 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
		@Override
		@SuppressWarnings("unchecked")
		public T eventListeners(SessionEventListener... listeners) {
			if ( this.listeners == null ) {
				this.listeners = sessionFactory.getSessionFactoryOptions()
						.getBaselineSessionEventsListenerBuilder()
						.buildBaselineList();
			}
			Collections.addAll( this.listeners, listeners );
			return (T) this;
		}

@ -1395,7 +1391,13 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
		@Override
		@SuppressWarnings("unchecked")
		public T clearEventListeners() {
			listeners.clear();
			if ( listeners == null ) {
				//Needs to initialize explicitly to an empty list as otherwise "null" implies the default listeners will be applied
				this.listeners = new ArrayList<SessionEventListener>( 3 );
			}
			else {
				listeners.clear();
			}
			return (T) this;
		}

@ -1495,6 +1497,11 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
			return sessionFactory.getSessionFactoryOptions().getJdbcTimeZone();
		}

		@Override
		public List<SessionEventListener> getCustomSessionEventListener() {
			return null;
		}

		@Override
		public SessionOwner getSessionOwner() {
			return null;

@ -1659,4 +1666,12 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
			LOG.emptyCompositesEnabled();
		}
	}

	/**
	 * @return the FastSessionServices for this SessionFactory.
	 */
	FastSessionServices getFastSessionServices() {
		return this.fastSessionServices;
	}

}

@ -155,7 +155,7 @@ public class SessionFactoryRegistry {
		final SessionFactory sessionFactory = sessionFactoryMap.get( uuid );
		if ( sessionFactory == null && LOG.isDebugEnabled() ) {
			LOG.debugf( "Not found: %s", uuid );
			LOG.debugf( sessionFactoryMap.toString() );
			LOG.debug( sessionFactoryMap.toString() );
		}
		return sessionFactory;
	}

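Note: this hunk, like the two IteratorImpl hunks above, switches from debugf to debug when the argument is not a format string. With JBoss Logging, debugf treats its argument as a format pattern, so a literal '%' inside arbitrary text (a map's toString, for example) can throw or garble the message. A sketch of the hazard:

	// LOG is a JBoss Logging logger
	String msg = "pool usage: 75% of max";
	LOG.debugf( msg ); // interpreted as a format string; the stray '%' breaks formatting
	LOG.debug( msg );  // logged verbatim
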
@@ -96,8 +96,6 @@ import org.hibernate.engine.spi.Status;
import org.hibernate.engine.spi.TypedValue;
import org.hibernate.engine.transaction.spi.TransactionImplementor;
import org.hibernate.engine.transaction.spi.TransactionObserver;
import org.hibernate.event.service.spi.EventListenerGroup;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.AutoFlushEvent;
import org.hibernate.event.spi.AutoFlushEventListener;
import org.hibernate.event.spi.ClearEvent;

@@ -107,7 +105,6 @@ import org.hibernate.event.spi.DeleteEventListener;
import org.hibernate.event.spi.DirtyCheckEvent;
import org.hibernate.event.spi.DirtyCheckEventListener;
import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.EventType;
import org.hibernate.event.spi.EvictEvent;
import org.hibernate.event.spi.EvictEventListener;
import org.hibernate.event.spi.FlushEvent;

@@ -148,6 +145,7 @@ import org.hibernate.jpa.internal.util.CacheModeHelper;
import org.hibernate.jpa.internal.util.ConfigurationHelper;
import org.hibernate.jpa.internal.util.FlushModeTypeHelper;
import org.hibernate.jpa.internal.util.LockModeTypeHelper;
import org.hibernate.jpa.internal.util.LockOptionsHelper;
import org.hibernate.jpa.spi.HibernateEntityManagerImplementor;
import org.hibernate.loader.criteria.CriteriaLoader;
import org.hibernate.loader.custom.CustomLoader;

@@ -170,8 +168,6 @@ import org.hibernate.query.internal.CollectionFilterImpl;
import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.resource.transaction.TransactionRequiredForJoinException;
import org.hibernate.resource.transaction.backend.jta.internal.JtaTransactionCoordinatorImpl;
import org.hibernate.resource.transaction.backend.jta.internal.synchronization.AfterCompletionAction;
import org.hibernate.resource.transaction.backend.jta.internal.synchronization.ManagedFlushChecker;
import org.hibernate.resource.transaction.spi.TransactionCoordinator;
import org.hibernate.resource.transaction.spi.TransactionStatus;
import org.hibernate.stat.SessionStatistics;

@@ -197,31 +193,22 @@ import static org.hibernate.cfg.AvailableSettings.JPA_SHARED_CACHE_STORE_MODE;
* @author Steve Ebersole
* @author Brett Meyer
* @author Chris Cranford
* @author Sanne Grinovero
*/
public final class SessionImpl
extends AbstractSessionImpl
implements EventSource, SessionImplementor, HibernateEntityManagerImplementor {
private static final EntityManagerMessageLogger log = HEMLogging.messageLogger( SessionImpl.class );

private static final String[] ENTITY_MANAGER_SPECIFIC_PROPERTIES = {
JPA_LOCK_SCOPE,
JPA_LOCK_TIMEOUT,
AvailableSettings.FLUSH_MODE,
JPA_SHARED_CACHE_RETRIEVE_MODE,
JPA_SHARED_CACHE_STORE_MODE,
QueryHints.SPEC_HINT_TIMEOUT
};

private Map<String, Object> properties = new HashMap<>();
// Defaults to null which means the properties are the default - as defined in FastSessionServices#defaultSessionProperties
private Map<String, Object> properties;

private transient ActionQueue actionQueue;
private transient StatefulPersistenceContext persistenceContext;

private transient LoadQueryInfluencers loadQueryInfluencers;

// todo : (5.2) HEM always initialized this. Is that really needed?
private LockOptions lockOptions = new LockOptions();
private LockOptions lockOptions;

private boolean autoClear;
private boolean autoClose;

@@ -231,10 +218,7 @@ public final class SessionImpl
private transient LoadEvent loadEvent; //cached LoadEvent instance

private transient boolean discardOnClose;

private transient TransactionObserver transactionObserver;
private transient EventListenerRegistry eventListenerRegistry;

public SessionImpl(SessionFactoryImpl factory, SessionCreationOptions options) {
super( factory, options );

@@ -246,8 +230,6 @@ public final class SessionImpl
this.autoClose = options.shouldAutoClose();
this.queryParametersValidationEnabled = options.isQueryParametersValidationEnabled();

this.discardOnClose = factory.getSessionFactoryOptions().isReleaseResourcesOnCloseEnabled();

if ( options instanceof SharedSessionCreationOptions ) {
final SharedSessionCreationOptions sharedOptions = (SharedSessionCreationOptions) options;
final ActionQueue.TransactionCompletionProcesses transactionCompletionProcesses = sharedOptions.getTransactionCompletionProcesses();

@@ -266,67 +248,53 @@ public final class SessionImpl
statistics.openSession();
}

if ( this.properties != null ) {
//There might be custom properties for this session that affect the LockOptions state
LockOptionsHelper.applyPropertiesToLockOptions( this.properties, this::getLockOptionsForWrite );
}
getSession().setCacheMode( fastSessionServices.initialSessionCacheMode );

// NOTE : pulse() already handles auto-join-ability correctly
getTransactionCoordinator().pulse();

setDefaultProperties();
applyProperties();

if ( log.isTraceEnabled() ) {
log.tracef( "Opened Session [%s] at timestamp: %s", getSessionIdentifier(), getTimestamp() );
}
}

private void setDefaultProperties() {
properties.putIfAbsent( AvailableSettings.FLUSH_MODE, getHibernateFlushMode().name() );
properties.putIfAbsent( JPA_LOCK_SCOPE, PessimisticLockScope.EXTENDED.name() );
properties.putIfAbsent( JPA_LOCK_TIMEOUT, LockOptions.WAIT_FOREVER );
properties.putIfAbsent( JPA_SHARED_CACHE_RETRIEVE_MODE, CacheModeHelper.DEFAULT_RETRIEVE_MODE );
properties.putIfAbsent( JPA_SHARED_CACHE_STORE_MODE, CacheModeHelper.DEFAULT_STORE_MODE );
private LockOptions getLockOptionsForRead() {
return this.lockOptions == null ? fastSessionServices.defaultLockOptions : this.lockOptions;
}

private void applyProperties() {
applyEntityManagerSpecificProperties();
setHibernateFlushMode( ConfigurationHelper.getFlushMode( properties.get( AvailableSettings.FLUSH_MODE ), FlushMode.AUTO ) );
setLockOptions( this.properties, this.lockOptions );
getSession().setCacheMode(
CacheModeHelper.interpretCacheMode(
currentCacheStoreMode(),
currentCacheRetrieveMode()
)
);
}

private void applyEntityManagerSpecificProperties() {
final Map<String, Object> properties = getFactory().getProperties();
for ( String key : ENTITY_MANAGER_SPECIFIC_PROPERTIES ) {
if ( properties.containsKey( key ) ) {
this.properties.put( key, properties.get( key ) );
}
private LockOptions getLockOptionsForWrite() {
if ( this.lockOptions == null ) {
this.lockOptions = new LockOptions();
}
return this.lockOptions;
}

protected void applyQuerySettingsAndHints(Query query) {
if ( lockOptions.getLockMode() != LockMode.NONE ) {
query.setLockMode( getLockMode( lockOptions.getLockMode() ) );
final LockOptions lockOptionsForRead = getLockOptionsForRead();
if ( lockOptionsForRead.getLockMode() != LockMode.NONE ) {
query.setLockMode( getLockMode( lockOptionsForRead.getLockMode() ) );
}
final Object queryTimeout;
if ( ( queryTimeout = properties.get( QueryHints.SPEC_HINT_TIMEOUT ) ) != null ) {
if ( ( queryTimeout = getSessionProperty( QueryHints.SPEC_HINT_TIMEOUT ) ) != null ) {
query.setHint( QueryHints.SPEC_HINT_TIMEOUT, queryTimeout );
}
final Object lockTimeout;
if ( ( lockTimeout = properties.get( JPA_LOCK_TIMEOUT ) ) != null ) {
if ( ( lockTimeout = getSessionProperty( JPA_LOCK_TIMEOUT ) ) != null ) {
query.setHint( JPA_LOCK_TIMEOUT, lockTimeout );
}
}

private CacheRetrieveMode currentCacheRetrieveMode() {
return determineCacheRetrieveMode( properties );
}

private CacheStoreMode currentCacheStoreMode() {
return determineCacheStoreMode( properties );
private Object getSessionProperty(final String name) {
if ( properties == null ) {
return fastSessionServices.defaultSessionProperties.get( name );
}
else {
return properties.get( name );
}
}

@Override
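The properties field now stays null until a caller customizes the session, and reads fall back to a shared immutable default map. A minimal sketch of that copy-on-first-write pattern (class and field names are illustrative, not Hibernate's):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class LazyProperties {
	// Shared defaults; never mutated.
	static final Map<String, Object> DEFAULTS =
			Collections.singletonMap( "javax.persistence.lock.timeout", -1 );

	private Map<String, Object> properties; // null means "defaults, nothing customized"

	Object get(String name) {
		// The read path never allocates when the session was never customized.
		return properties == null ? DEFAULTS.get( name ) : properties.get( name );
	}

	void put(String name, Object value) {
		if ( properties == null ) {
			properties = new HashMap<>( DEFAULTS ); // copy the defaults on first write only
		}
		properties.put( name, value );
	}
}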
@@ -354,13 +322,12 @@ public final class SessionImpl
persistenceContext.clear();
actionQueue.clear();

final ClearEvent event = new ClearEvent( this );
for ( ClearEventListener listener : listeners( EventType.CLEAR ) ) {
listener.onClear( event );
}
fastSessionServices.eventListenerGroup_CLEAR.fireLazyEventOnEachListener( this::createClearEvent, ClearEventListener::onClear );
}

private ClearEvent createClearEvent() {
return new ClearEvent( this );
}

@Override
@SuppressWarnings("StatementWithEmptyBody")

@@ -388,7 +355,7 @@ public final class SessionImpl
// Original hibernate-entitymanager EM#close behavior
checkSessionFactoryOpen();
checkOpenOrWaitingForAutoClose();
if ( discardOnClose || !isTransactionInProgress( false ) ) {
if ( fastSessionServices.discardOnClose || !isTransactionInProgress( false ) ) {
super.close();
}
else {

@@ -422,7 +389,8 @@ public final class SessionImpl
return super.shouldCloseJdbcCoordinatorOnClose( isTransactionCoordinatorShared );
}

if ( getActionQueue().hasBeforeTransactionActions() || getActionQueue().hasAfterTransactionActions() ) {
final ActionQueue actionQueue = getActionQueue();
if ( actionQueue.hasBeforeTransactionActions() || actionQueue.hasAfterTransactionActions() ) {
log.warn(
"On close, shared Session had before/after transaction actions that have not yet been processed"
);

@@ -459,10 +427,6 @@ public final class SessionImpl
}
}

private boolean isFlushModeNever() {
return FlushMode.isManualFlushMode( getHibernateFlushMode() );
}

private void managedFlush() {
if ( isClosed() && !waitingForAutoClose ) {
log.trace( "Skipping auto-flush due to session closed" );

@@ -628,28 +592,14 @@ public final class SessionImpl
fireSaveOrUpdate( new SaveOrUpdateEvent( entityName, obj, this ) );
}

private void fireSaveOrUpdate(SaveOrUpdateEvent event) {
private void fireSaveOrUpdate(final SaveOrUpdateEvent event) {
checkOpen();
checkTransactionSynchStatus();
checkNoUnresolvedActionsBeforeOperation();
for ( SaveOrUpdateEventListener listener : listeners( EventType.SAVE_UPDATE ) ) {
listener.onSaveOrUpdate( event );
}
fastSessionServices.eventListenerGroup_SAVE_UPDATE.fireEventOnEachListener( event, SaveOrUpdateEventListener::onSaveOrUpdate );
checkNoUnresolvedActionsAfterOperation();
}

private <T> Iterable<T> listeners(EventType<T> type) {
return eventListenerGroup( type ).listeners();
}

private <T> EventListenerGroup<T> eventListenerGroup(EventType<T> type) {
if ( this.eventListenerRegistry == null ) {
this.eventListenerRegistry = getFactory().getServiceRegistry().getService( EventListenerRegistry.class );
}
return eventListenerRegistry.getEventListenerGroup( type );
}

// save() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@Override

@@ -662,13 +612,11 @@ public final class SessionImpl
return fireSave( new SaveOrUpdateEvent( entityName, object, this ) );
}

private Serializable fireSave(SaveOrUpdateEvent event) {
private Serializable fireSave(final SaveOrUpdateEvent event) {
checkOpen();
checkTransactionSynchStatus();
checkNoUnresolvedActionsBeforeOperation();
for ( SaveOrUpdateEventListener listener : listeners( EventType.SAVE ) ) {
listener.onSaveOrUpdate( event );
}
fastSessionServices.eventListenerGroup_SAVE.fireEventOnEachListener( event, SaveOrUpdateEventListener::onSaveOrUpdate );
checkNoUnresolvedActionsAfterOperation();
return event.getResultId();
}
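The recurring rewrite in these hunks replaces "resolve the listener Iterable, loop, invoke" with a single call that takes the event plus a method reference. A minimal sketch of such a group abstraction (a simplification; Hibernate's EventListenerGroup carries more state):

import java.util.List;
import java.util.function.BiConsumer;

final class ListenerGroup<T> {
	private final List<T> listeners;

	ListenerGroup(List<T> listeners) {
		this.listeners = listeners;
	}

	// A non-capturing method reference (e.g. SaveOrUpdateEventListener::onSaveOrUpdate)
	// compiles to a constant, so no lambda instance is allocated per invocation.
	<U> void fireEventOnEachListener(U event, BiConsumer<T, U> actionOnEvent) {
		for ( T listener : listeners ) {
			actionOnEvent.accept( listener, event );
		}
	}
}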
@@ -690,9 +638,7 @@ public final class SessionImpl
checkOpen();
checkTransactionSynchStatus();
checkNoUnresolvedActionsBeforeOperation();
for ( SaveOrUpdateEventListener listener : listeners( EventType.UPDATE ) ) {
listener.onSaveOrUpdate( event );
}
fastSessionServices.eventListenerGroup_UPDATE.fireEventOnEachListener( event, SaveOrUpdateEventListener::onSaveOrUpdate );
checkNoUnresolvedActionsAfterOperation();
}

@@ -725,13 +671,10 @@ public final class SessionImpl
private void fireLock(LockEvent event) {
checkOpen();
pulseTransactionCoordinator();
for ( LockEventListener listener : listeners( EventType.LOCK ) ) {
listener.onLock( event );
}
fastSessionServices.eventListenerGroup_LOCK.fireEventOnEachListener( event, LockEventListener::onLock );
delayedAfterCompletion();
}

// persist() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@Override

@@ -752,14 +695,12 @@ public final class SessionImpl
firePersist( copiedAlready, new PersistEvent( entityName, object, this ) );
}

private void firePersist(PersistEvent event) {
private void firePersist(final PersistEvent event) {
try {
checkTransactionSynchStatus();
checkNoUnresolvedActionsBeforeOperation();

for ( PersistEventListener listener : listeners( EventType.PERSIST ) ) {
listener.onPersist( event );
}
fastSessionServices.eventListenerGroup_PERSIST.fireEventOnEachListener( event, PersistEventListener::onPersist );
}
catch (MappingException e) {
throw getExceptionConverter().convert( new IllegalArgumentException( e.getMessage() ) );

@@ -777,13 +718,13 @@ public final class SessionImpl
}
}

private void firePersist(Map copiedAlready, PersistEvent event) {
private void firePersist(final Map copiedAlready, final PersistEvent event) {
pulseTransactionCoordinator();

try {
for ( PersistEventListener listener : listeners( EventType.PERSIST ) ) {
listener.onPersist( event, copiedAlready );
}
//Uses a capturing lambda in this case as we need to carry the additional Map parameter:
fastSessionServices.eventListenerGroup_PERSIST
.fireEventOnEachListener( event, copiedAlready, PersistEventListener::onPersist );
}
catch ( MappingException e ) {
throw getExceptionConverter().convert( new IllegalArgumentException( e.getMessage() ) ) ;
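The comment in the hunk above is worth unpacking: a plain method reference such as PersistEventListener::onPersist compiles to a constant, while a lambda that closes over copiedAlready would be allocated on every call. The three-argument overload threads the extra parameter through explicitly instead, sketched here under the same simplified ListenerGroup shape as above:

import java.util.List;

final class ListenerGroup3<T> {
	interface TriConsumer<A, B, C> { void accept(A a, B b, C c); }

	private final List<T> listeners;

	ListenerGroup3(List<T> listeners) { this.listeners = listeners; }

	// Passing 'extra' explicitly keeps the caller's method reference non-capturing:
	// group.fireEventOnEachListener( event, copiedAlready, PersistEventListener::onPersist )
	<U, X> void fireEventOnEachListener(U event, X extra, TriConsumer<T, U, X> action) {
		for ( T listener : listeners ) {
			action.accept( listener, event, extra );
		}
	}
}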
@@ -799,41 +740,15 @@ public final class SessionImpl

// persistOnFlush() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

public void persistOnFlush(String entityName, Object object)
throws HibernateException {
firePersistOnFlush( new PersistEvent( entityName, object, this ) );
}

public void persistOnFlush(Object object) throws HibernateException {
persist( null, object );
}

@Override
public void persistOnFlush(String entityName, Object object, Map copiedAlready)
throws HibernateException {
firePersistOnFlush( copiedAlready, new PersistEvent( entityName, object, this ) );
}

private void firePersistOnFlush(Map copiedAlready, PersistEvent event) {
public void persistOnFlush(String entityName, Object object, Map copiedAlready) {
checkOpenOrWaitingForAutoClose();
pulseTransactionCoordinator();
for ( PersistEventListener listener : listeners( EventType.PERSIST_ONFLUSH ) ) {
listener.onPersist( event, copiedAlready );
}
PersistEvent event = new PersistEvent( entityName, object, this );
fastSessionServices.eventListenerGroup_PERSIST_ONFLUSH.fireEventOnEachListener( event, copiedAlready, PersistEventListener::onPersist );
delayedAfterCompletion();
}

private void firePersistOnFlush(PersistEvent event) {
checkOpen();
checkTransactionSynchStatus();
checkNoUnresolvedActionsBeforeOperation();
for ( PersistEventListener listener : listeners( EventType.PERSIST_ONFLUSH ) ) {
listener.onPersist( event );
}
checkNoUnresolvedActionsAfterOperation();
}

// merge() operations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@Override

@@ -858,9 +773,7 @@ public final class SessionImpl
try {
checkTransactionSynchStatus();
checkNoUnresolvedActionsBeforeOperation();
for ( MergeEventListener listener : listeners( EventType.MERGE ) ) {
listener.onMerge( event );
}
fastSessionServices.eventListenerGroup_MERGE.fireEventOnEachListener( event, MergeEventListener::onMerge );
checkNoUnresolvedActionsAfterOperation();
}
catch ( ObjectDeletedException sse ) {

@@ -877,12 +790,10 @@ public final class SessionImpl
return event.getResult();
}

private void fireMerge(Map copiedAlready, MergeEvent event) {
private void fireMerge(final Map copiedAlready, final MergeEvent event) {
try {
pulseTransactionCoordinator();
for ( MergeEventListener listener : listeners( EventType.MERGE ) ) {
listener.onMerge( event, copiedAlready );
}
fastSessionServices.eventListenerGroup_MERGE.fireEventOnEachListener( event, copiedAlready, MergeEventListener::onMerge );
}
catch ( ObjectDeletedException sse ) {
throw getExceptionConverter().convert( new IllegalArgumentException( sse ) );

@@ -918,7 +829,9 @@ public final class SessionImpl
public void delete(String entityName, Object object, boolean isCascadeDeleteEnabled, Set transientEntities)
throws HibernateException {
checkOpenOrWaitingForAutoClose();
if ( log.isTraceEnabled() && persistenceContext.isRemovingOrphanBeforeUpates() ) {
final boolean removingOrphanBeforeUpates = persistenceContext.isRemovingOrphanBeforeUpates();
final boolean traceEnabled = log.isTraceEnabled();
if ( traceEnabled && removingOrphanBeforeUpates ) {
logRemoveOrphanBeforeUpdates( "before continuing", entityName, object );
}
fireDelete(

@@ -926,12 +839,12 @@ public final class SessionImpl
entityName,
object,
isCascadeDeleteEnabled,
persistenceContext.isRemovingOrphanBeforeUpates(),
removingOrphanBeforeUpates,
this
),
transientEntities
);
if ( log.isTraceEnabled() && persistenceContext.isRemovingOrphanBeforeUpates() ) {
if ( traceEnabled && removingOrphanBeforeUpates ) {
logRemoveOrphanBeforeUpdates( "after continuing", entityName, object );
}
}

@@ -940,7 +853,8 @@ public final class SessionImpl
public void removeOrphanBeforeUpdates(String entityName, Object child) {
// TODO: The removeOrphan concept is a temporary "hack" for HHH-6484. This should be removed once action/task
// ordering is improved.
if ( log.isTraceEnabled() ) {
final boolean traceEnabled = log.isTraceEnabled();
if ( traceEnabled ) {
logRemoveOrphanBeforeUpdates( "begin", entityName, child );
}
persistenceContext.beginRemoveOrphanBeforeUpdates();

@@ -950,27 +864,27 @@ public final class SessionImpl
}
finally {
persistenceContext.endRemoveOrphanBeforeUpdates();
if ( log.isTraceEnabled() ) {
if ( traceEnabled ) {
logRemoveOrphanBeforeUpdates( "end", entityName, child );
}
}
}

private void logRemoveOrphanBeforeUpdates(String timing, String entityName, Object entity) {
final EntityEntry entityEntry = persistenceContext.getEntry( entity );
log.tracef(
"%s remove orphan before updates: [%s]",
timing,
entityEntry == null ? entityName : MessageHelper.infoString( entityName, entityEntry.getId() )
);
if ( log.isTraceEnabled() ) {
final EntityEntry entityEntry = persistenceContext.getEntry( entity );
log.tracef(
"%s remove orphan before updates: [%s]",
timing,
entityEntry == null ? entityName : MessageHelper.infoString( entityName, entityEntry.getId() )
);
}
}

private void fireDelete(DeleteEvent event) {
private void fireDelete(final DeleteEvent event) {
try{
pulseTransactionCoordinator();
for ( DeleteEventListener listener : listeners( EventType.DELETE ) ) {
listener.onDelete( event );
}
fastSessionServices.eventListenerGroup_DELETE.fireEventOnEachListener( event, DeleteEventListener::onDelete );
}
catch ( ObjectDeletedException sse ) {
throw getExceptionConverter().convert( new IllegalArgumentException( sse ) );

@@ -987,12 +901,10 @@ public final class SessionImpl
}
}

private void fireDelete(DeleteEvent event, Set transientEntities) {
private void fireDelete(final DeleteEvent event, final Set transientEntities) {
try{
pulseTransactionCoordinator();
for ( DeleteEventListener listener : listeners( EventType.DELETE ) ) {
listener.onDelete( event, transientEntities );
}
fastSessionServices.eventListenerGroup_DELETE.fireEventOnEachListener( event, transientEntities, DeleteEventListener::onDelete );
}
catch ( ObjectDeletedException sse ) {
throw getExceptionConverter().convert( new IllegalArgumentException( sse ) );
@@ -1108,7 +1020,7 @@ public final class SessionImpl
boolean clearedEffectiveGraph = false;
if ( semantic != null ) {
if ( ! graph.appliesTo( entityName ) ) {
log.debugf( "Clearing effective entity graph for subsequent-select" );
log.debug( "Clearing effective entity graph for subsequent-select" );
clearedEffectiveGraph = true;
effectiveEntityGraph.clear();
}

@@ -1265,19 +1177,15 @@ public final class SessionImpl
// so to skip the session open, transaction synch, etc.. checks,
// which have been proven to be not particularly cheap:
// it seems they prevent these hot methods from being inlined.
private void fireLoadNoChecks(LoadEvent event, LoadType loadType) {
private void fireLoadNoChecks(final LoadEvent event, final LoadType loadType) {
pulseTransactionCoordinator();
for ( LoadEventListener listener : listeners( EventType.LOAD ) ) {
listener.onLoad( event, loadType );
}
fastSessionServices.eventListenerGroup_LOAD.fireEventOnEachListener( event, loadType, LoadEventListener::onLoad );
}

private void fireResolveNaturalId(ResolveNaturalIdEvent event) {
private void fireResolveNaturalId(final ResolveNaturalIdEvent event) {
checkOpenOrWaitingForAutoClose();
pulseTransactionCoordinator();
for ( ResolveNaturalIdEventListener listener : listeners( EventType.RESOLVE_NATURAL_ID ) ) {
listener.onResolveNaturalId( event );
}
fastSessionServices.eventListenerGroup_RESOLVE_NATURAL_ID.fireEventOnEachListener( event, ResolveNaturalIdEventListener::onResolveNaturalId );
delayedAfterCompletion();
}

@@ -1320,7 +1228,7 @@ public final class SessionImpl
fireRefresh( refreshedAlready, new RefreshEvent( entityName, object, this ) );
}

private void fireRefresh(RefreshEvent event) {
private void fireRefresh(final RefreshEvent event) {
try {
if ( !getSessionFactory().getSessionFactoryOptions().isAllowRefreshDetachedEntity() ) {
if ( event.getEntityName() != null ) {

@@ -1335,9 +1243,7 @@ public final class SessionImpl
}
}
pulseTransactionCoordinator();
for ( RefreshEventListener listener : listeners( EventType.REFRESH ) ) {
listener.onRefresh( event );
}
fastSessionServices.eventListenerGroup_REFRESH.fireEventOnEachListener( event, RefreshEventListener::onRefresh );
}
catch (RuntimeException e) {
if ( !getSessionFactory().getSessionFactoryOptions().isJpaBootstrap() ) {

@@ -1353,12 +1259,10 @@ public final class SessionImpl
}
}

private void fireRefresh(Map refreshedAlready, RefreshEvent event) {
private void fireRefresh(final Map refreshedAlready, final RefreshEvent event) {
try {
pulseTransactionCoordinator();
for ( RefreshEventListener listener : listeners( EventType.REFRESH ) ) {
listener.onRefresh( event, refreshedAlready );
}
fastSessionServices.eventListenerGroup_REFRESH.fireEventOnEachListener( event, refreshedAlready, RefreshEventListener::onRefresh );
}
catch (RuntimeException e) {
throw getExceptionConverter().convert( e );

@@ -1382,12 +1286,10 @@ public final class SessionImpl
fireReplicate( new ReplicateEvent( entityName, obj, replicationMode, this ) );
}

private void fireReplicate(ReplicateEvent event) {
private void fireReplicate(final ReplicateEvent event) {
checkOpen();
pulseTransactionCoordinator();
for ( ReplicateEventListener listener : listeners( EventType.REPLICATE ) ) {
listener.onReplicate( event );
}
fastSessionServices.eventListenerGroup_REPLICATE.fireEventOnEachListener( event, ReplicateEventListener::onReplicate );
delayedAfterCompletion();
}

@@ -1400,15 +1302,10 @@ public final class SessionImpl
*/
@Override
public void evict(Object object) throws HibernateException {
fireEvict( new EvictEvent( object, this ) );
}

private void fireEvict(EvictEvent event) {
checkOpen();
pulseTransactionCoordinator();
for ( EvictEventListener listener : listeners( EventType.EVICT ) ) {
listener.onEvict( event );
}
final EvictEvent event = new EvictEvent( object, this );
fastSessionServices.eventListenerGroup_EVICT.fireEventOnEachListener( event, EvictEventListener::onEvict );
delayedAfterCompletion();
}

@@ -1423,9 +1320,7 @@ public final class SessionImpl
return false;
}
AutoFlushEvent event = new AutoFlushEvent( querySpaces, this );
for ( AutoFlushEventListener listener : listeners( EventType.AUTO_FLUSH ) ) {
listener.onAutoFlush( event );
}
fastSessionServices.eventListenerGroup_AUTO_FLUSH.fireEventOnEachListener( event, AutoFlushEventListener::onAutoFlush );
return event.isFlushRequired();
}

@@ -1439,9 +1334,7 @@ public final class SessionImpl
return true;
}
DirtyCheckEvent event = new DirtyCheckEvent( this );
for ( DirtyCheckEventListener listener : listeners( EventType.DIRTY_CHECK ) ) {
listener.onDirtyCheck( event );
}
fastSessionServices.eventListenerGroup_DIRTY_CHECK.fireEventOnEachListener( event, DirtyCheckEventListener::onDirtyCheck );
delayedAfterCompletion();
return event.isDirty();
}

@@ -1461,11 +1354,8 @@ public final class SessionImpl
throw new HibernateException( "Flush during cascade is dangerous" );
}

FlushEvent flushEvent = new FlushEvent( this );
for ( FlushEventListener listener : listeners( EventType.FLUSH ) ) {
listener.onFlush( flushEvent );
}

FlushEvent event = new FlushEvent( this );
fastSessionServices.eventListenerGroup_FLUSH.fireEventOnEachListener( event, FlushEventListener::onFlush );
delayedAfterCompletion();
}
catch ( RuntimeException e ) {

@@ -2266,9 +2156,7 @@ public final class SessionImpl
checkOpenOrWaitingForAutoClose();
pulseTransactionCoordinator();
InitializeCollectionEvent event = new InitializeCollectionEvent( collection, this );
for ( InitializeCollectionEventListener listener : listeners( EventType.INIT_COLLECTION ) ) {
listener.onInitializeCollection( event );
}
fastSessionServices.eventListenerGroup_INIT_COLLECTION.fireEventOnEachListener( event, InitializeCollectionEventListener::onInitializeCollection );
delayedAfterCompletion();
}

@@ -2506,12 +2394,12 @@ public final class SessionImpl
// We do not want an exception to be thrown if the transaction
// is not accessible. If the transaction is not accessible,
// then return null.
return isTransactionAccessible() ? accessTransaction() : null;
return fastSessionServices.isJtaTransactionAccessible ? accessTransaction() : null;
}

@Override
public void beforeTransactionCompletion() {
log.tracef( "SessionImpl#beforeTransactionCompletion()" );
log.trace( "SessionImpl#beforeTransactionCompletion()" );
flushBeforeTransactionCompletion();
actionQueue.beforeTransactionCompletion();
try {

@@ -2525,7 +2413,9 @@ public final class SessionImpl

@Override
public void afterTransactionCompletion(boolean successful, boolean delayed) {
log.tracef( "SessionImpl#afterTransactionCompletion(successful=%s, delayed=%s)", successful, delayed );
if ( log.isTraceEnabled() ) {
log.tracef( "SessionImpl#afterTransactionCompletion(successful=%s, delayed=%s)", successful, delayed );
}

if ( !isClosed() || waitingForAutoClose ) {
if ( autoClear ||!successful ) {

@@ -3364,25 +3254,6 @@ public final class SessionImpl
return getHibernateFlushMode() != FlushMode.MANUAL;
}

private static final AfterCompletionAction STANDARD_AFTER_COMPLETION_ACTION = (AfterCompletionAction) (successful, session) -> {
// nothing to do by default.
};

public static class ManagedFlushCheckerStandardImpl implements ManagedFlushChecker {
@Override
public boolean shouldDoManagedFlush(SessionImplementor session) {
if ( session.isClosed() ) {
return false;
}
return session.getHibernateFlushMode() != FlushMode.MANUAL;
}
}

private static final ManagedFlushCheckerStandardImpl STANDARD_MANAGED_FLUSH_CHECKER = new ManagedFlushCheckerStandardImpl() {
};

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// HibernateEntityManager impl

@@ -3399,57 +3270,17 @@ public final class SessionImpl
@Override
public LockOptions getLockRequest(LockModeType lockModeType, Map<String, Object> properties) {
LockOptions lockOptions = new LockOptions();
LockOptions.copy( this.lockOptions, lockOptions );
if ( this.lockOptions != null ) { //otherwise the default LockOptions constructor is the same as DEFAULT_LOCK_OPTIONS
LockOptions.copy( this.lockOptions, lockOptions );
}
lockOptions.setLockMode( LockModeTypeHelper.getLockMode( lockModeType ) );
if ( properties != null ) {
setLockOptions( properties, lockOptions );
LockOptionsHelper.applyPropertiesToLockOptions( properties, () -> lockOptions );
}
return lockOptions;
}

private void setLockOptions(Map<String, Object> props, LockOptions options) {
Object lockScope = props.get( JPA_LOCK_SCOPE );
if ( lockScope instanceof String && PessimisticLockScope.valueOf( ( String ) lockScope ) == PessimisticLockScope.EXTENDED ) {
options.setScope( true );
}
else if ( lockScope instanceof PessimisticLockScope ) {
boolean extended = PessimisticLockScope.EXTENDED.equals( lockScope );
options.setScope( extended );
}
else if ( lockScope != null ) {
throw new PersistenceException( "Unable to parse " + JPA_LOCK_SCOPE + ": " + lockScope );
}

Object lockTimeout = props.get( JPA_LOCK_TIMEOUT );
int timeout = 0;
boolean timeoutSet = false;
if ( lockTimeout instanceof String ) {
timeout = Integer.parseInt( ( String ) lockTimeout );
timeoutSet = true;
}
else if ( lockTimeout instanceof Number ) {
timeout = ( (Number) lockTimeout ).intValue();
timeoutSet = true;
}
else if ( lockTimeout != null ) {
throw new PersistenceException( "Unable to parse " + JPA_LOCK_TIMEOUT + ": " + lockTimeout );
}

if ( timeoutSet ) {
if ( timeout == LockOptions.SKIP_LOCKED ) {
options.setTimeOut( LockOptions.SKIP_LOCKED );
}
else if ( timeout < 0 ) {
options.setTimeOut( LockOptions.WAIT_FOREVER );
}
else if ( timeout == 0 ) {
options.setTimeOut( LockOptions.NO_WAIT );
}
else {
options.setTimeOut( timeout );
}
}
}

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -3555,20 +3386,20 @@ public final class SessionImpl
}
if ( retrieveMode == null ) {
// use the EM setting
retrieveMode = determineCacheRetrieveMode( this.properties );
retrieveMode = fastSessionServices.getCacheRetrieveMode( this.properties );
}
if ( storeMode == null ) {
// use the EM setting
storeMode = determineCacheStoreMode( this.properties );
storeMode = fastSessionServices.getCacheStoreMode( this.properties );
}
return CacheModeHelper.interpretCacheMode( storeMode, retrieveMode );
}

private CacheRetrieveMode determineCacheRetrieveMode(Map<String, Object> settings) {
private static CacheRetrieveMode determineCacheRetrieveMode(Map<String, Object> settings) {
return ( CacheRetrieveMode ) settings.get( JPA_SHARED_CACHE_RETRIEVE_MODE );
}

private CacheStoreMode determineCacheStoreMode(Map<String, Object> settings) {
private static CacheStoreMode determineCacheStoreMode(Map<String, Object> settings) {
return ( CacheStoreMode ) settings.get( JPA_SHARED_CACHE_STORE_MODE );
}

@@ -3698,12 +3529,48 @@ public final class SessionImpl
return;
}

if ( propertyName == null ) {
log.warnf( "Property having key null is illegal; value won't be set." );
return;
}

//Store property for future reference:

if ( properties == null ) {
properties = computeCurrentSessionProperties();
}
properties.put( propertyName, value );
applyProperties();

//now actually update settings, if it's any of these which have a direct impact on this Session state:

if ( AvailableSettings.FLUSH_MODE.equals( propertyName ) ) {
setHibernateFlushMode( ConfigurationHelper.getFlushMode( value, FlushMode.AUTO ) );
}
else if ( JPA_LOCK_SCOPE.equals( propertyName ) || JPA_LOCK_TIMEOUT.equals( propertyName ) ) {
LockOptionsHelper.applyPropertiesToLockOptions( properties, this::getLockOptionsForWrite );
}
else if ( JPA_SHARED_CACHE_RETRIEVE_MODE.equals( propertyName ) || JPA_SHARED_CACHE_STORE_MODE.equals( propertyName ) ) {
getSession().setCacheMode(
CacheModeHelper.interpretCacheMode(
determineCacheStoreMode( properties ),
determineCacheRetrieveMode( properties )
)
);
}
}

private Map<String, Object> computeCurrentSessionProperties() {
final HashMap<String, Object> map = new HashMap<>( fastSessionServices.defaultSessionProperties );
//The FLUSH_MODE is always set at Session creation time, so it needs special treatment to not eagerly initialize this Map:
map.put( AvailableSettings.FLUSH_MODE, getHibernateFlushMode().name() );
return map;
}

@Override
public Map<String, Object> getProperties() {
if ( properties == null ) {
properties = computeCurrentSessionProperties();
}
return Collections.unmodifiableMap( properties );
}
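The setProperty rewrite above materializes the per-session map on demand and then applies only the setting that changed. Callers see no difference; a hedged sketch of the JPA-level entry point (the property value used is illustrative):

import javax.persistence.EntityManager;

class SetPropertySketch {
	void tighten(EntityManager em) {
		// The first write copies the shared defaults into a per-session map,
		// then routes to the lock-options branch shown in the hunk above.
		em.setProperty( "javax.persistence.lock.timeout", 0 ); // no-wait semantics
	}
}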
@@ -3926,7 +3793,5 @@ public final class SessionImpl
for ( String filterName : loadQueryInfluencers.getEnabledFilterNames() ) {
( (FilterImpl) loadQueryInfluencers.getEnabledFilter( filterName ) ).afterDeserialize( getFactory() );
}

this.discardOnClose = getFactory().getSessionFactoryOptions().isReleaseResourcesOnCloseEnabled();
}
}


@@ -123,7 +123,7 @@ public final class StringHelper {
if ( template == null ) {
return null;
}
int loc = template.indexOf( placeholder );
int loc = indexOfPlaceHolder( template, placeholder, wholeWords );
if ( loc < 0 ) {
return template;
}

@@ -189,6 +189,24 @@ public final class StringHelper {
return buf.toString();
}

private static int indexOfPlaceHolder(String template, String placeholder, boolean wholeWords) {
if ( wholeWords ) {
int placeholderIndex = -1;
boolean isPartialPlaceholderMatch;
do {
placeholderIndex = template.indexOf( placeholder, placeholderIndex + 1 );
isPartialPlaceholderMatch = placeholderIndex != -1 &&
template.length() > placeholderIndex + placeholder.length() &&
Character.isJavaIdentifierPart( template.charAt( placeholderIndex + placeholder.length() ) );
} while ( placeholderIndex != -1 && isPartialPlaceholderMatch );

return placeholderIndex;
}
else {
return template.indexOf( placeholder );
}
}

/**
* Used to find the ordinal parameters (e.g. '?1') in a string.
*/
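indexOfPlaceHolder exists because a plain indexOf treats one placeholder as a prefix match of another. A self-contained check mirroring the wholeWords rule (the example strings are invented):

final class WholeWordSketch {
	// A match only counts when the character after it is not a Java identifier
	// part (or the match ends the string), so ':p' inside ':p2' is rejected.
	static boolean wholeWordAt(String template, String placeholder, int idx) {
		int end = idx + placeholder.length();
		return template.length() == end || !Character.isJavaIdentifierPart( template.charAt( end ) );
	}

	public static void main(String[] args) {
		System.out.println( wholeWordAt( "where a = :p2", ":p", 10 ) ); // false
		System.out.println( wholeWordAt( "where a = :p", ":p", 10 ) );  // true
	}
}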
@@ -13,6 +13,7 @@ import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

/**

@@ -62,6 +63,15 @@ public final class IdentityMap<K,V> implements Map<K,V> {
identityMap.map.forEach( (kIdentityKey, v) -> consumer.accept( kIdentityKey.key ) );
}

/**
* Override Map{@link #forEach(BiConsumer)} to provide a more efficient implementation
* @param action the operation to apply to each element
*/
@Override
public void forEach(BiConsumer<? super K, ? super V> action) {
map.forEach( (k,v) -> action.accept( k.key, v ) );
}

public Iterator<K> keyIterator() {
return new KeyIterator<K>( map.keySet().iterator() );
}
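The forEach override matters because the default Map implementation iterates entrySet() with an Iterator; delegating to the inner map's forEach avoids that allocation, and unwrapping the key stays a single lambda. A minimal sketch of the wrapper-map pattern (key type simplified):

import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;

final class WrappingMap<K, V> {
	static final class Key<K> { final K key; Key(K key) { this.key = key; } }

	private final Map<Key<K>, V> map = new HashMap<>();

	// Delegate instead of inheriting Map.forEach's entrySet()+Iterator default.
	public void forEach(BiConsumer<? super K, ? super V> action) {
		map.forEach( (k, v) -> action.accept( k.key, v ) );
	}
}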
@@ -30,21 +30,21 @@ public abstract class ConfigurationHelper {

public static FlushMode getFlushMode(Object value, FlushMode defaultFlushMode) {
final FlushMode flushMode;
if (value instanceof FlushMode) {
if ( value instanceof FlushMode ) {
flushMode = (FlushMode) value;
}
else if (value instanceof javax.persistence.FlushModeType) {
flushMode = ConfigurationHelper.getFlushMode( (javax.persistence.FlushModeType) value);
else if ( value instanceof javax.persistence.FlushModeType ) {
flushMode = ConfigurationHelper.getFlushMode( (javax.persistence.FlushModeType) value );
}
else if (value instanceof String) {
flushMode = ConfigurationHelper.getFlushMode( (String) value);
else if ( value instanceof String ) {
flushMode = ConfigurationHelper.getFlushMode( (String) value );
}
else {
flushMode = defaultFlushMode;
}

if (flushMode == null) {
throw new PersistenceException("Unable to parse org.hibernate.flushMode: " + value);
if ( flushMode == null ) {
throw new PersistenceException( "Unable to parse org.hibernate.flushMode: " + value );
}

return flushMode;

@@ -55,21 +55,21 @@ public abstract class ConfigurationHelper {
}

private static FlushMode getFlushMode(String flushMode) {
if (flushMode == null) {
if ( flushMode == null ) {
return null;
}
flushMode = flushMode.toUpperCase(Locale.ROOT);
flushMode = flushMode.toUpperCase( Locale.ROOT );
return FlushMode.valueOf( flushMode );
}

private static FlushMode getFlushMode(FlushModeType flushMode) {
switch(flushMode) {
switch ( flushMode ) {
case AUTO:
return FlushMode.AUTO;
case COMMIT:
return FlushMode.COMMIT;
default:
throw new AssertionFailure("Unknown FlushModeType: " + flushMode);
throw new AssertionFailure( "Unknown FlushModeType: " + flushMode );
}
}

@@ -0,0 +1,77 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.util;

import java.util.Map;
import java.util.function.Supplier;
import javax.persistence.PersistenceException;
import javax.persistence.PessimisticLockScope;

import org.hibernate.LockOptions;

import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_SCOPE;
import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_TIMEOUT;

public final class LockOptionsHelper {

private LockOptionsHelper() {
//utility class, not to be constructed
}

/**
* Applies configuration properties on a {@link LockOptions} instance, passed as a supplier
* so to make it possible to skip allocating the {@link LockOptions} instance if there's
* nothing to set.
*
* @param props The configuration properties
* @param lockOptionsSupplier The reference to the lock to modify
*/
public static void applyPropertiesToLockOptions(final Map<String, Object> props, final Supplier<LockOptions> lockOptionsSupplier) {
Object lockScope = props.get( JPA_LOCK_SCOPE );
if ( lockScope instanceof String && PessimisticLockScope.valueOf( (String) lockScope ) == PessimisticLockScope.EXTENDED ) {
lockOptionsSupplier.get().setScope( true );
}
else if ( lockScope instanceof PessimisticLockScope ) {
boolean extended = PessimisticLockScope.EXTENDED.equals( lockScope );
lockOptionsSupplier.get().setScope( extended );
}
else if ( lockScope != null ) {
throw new PersistenceException( "Unable to parse " + JPA_LOCK_SCOPE + ": " + lockScope );
}

Object lockTimeout = props.get( JPA_LOCK_TIMEOUT );
int timeout = 0;
boolean timeoutSet = false;
if ( lockTimeout instanceof String ) {
timeout = Integer.parseInt( (String) lockTimeout );
timeoutSet = true;
}
else if ( lockTimeout instanceof Number ) {
timeout = ( (Number) lockTimeout ).intValue();
timeoutSet = true;
}
else if ( lockTimeout != null ) {
throw new PersistenceException( "Unable to parse " + JPA_LOCK_TIMEOUT + ": " + lockTimeout );
}

if ( timeoutSet ) {
if ( timeout == LockOptions.SKIP_LOCKED ) {
lockOptionsSupplier.get().setTimeOut( LockOptions.SKIP_LOCKED );
}
else if ( timeout < 0 ) {
lockOptionsSupplier.get().setTimeOut( LockOptions.WAIT_FOREVER );
}
else if ( timeout == 0 ) {
lockOptionsSupplier.get().setTimeOut( LockOptions.NO_WAIT );
}
else {
lockOptionsSupplier.get().setTimeOut( timeout );
}
}
}

}
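The Supplier indirection is the point of this new helper: when the properties map carries no lock hints, get() is never invoked and no LockOptions is allocated. A hedged usage sketch built on exactly the API introduced above:

import java.util.Map;
import org.hibernate.LockOptions;
import org.hibernate.jpa.internal.util.LockOptionsHelper;

class LockOptionsHelperUsage {
	private LockOptions lockOptions; // may legitimately stay null forever

	void apply(Map<String, Object> props) {
		// Lazy target: allocate only if a JPA lock scope/timeout hint is present.
		LockOptionsHelper.applyPropertiesToLockOptions( props, () -> {
			if ( lockOptions == null ) {
				lockOptions = new LockOptions();
			}
			return lockOptions;
		} );
	}
}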
@@ -11,7 +11,6 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

@@ -38,7 +37,6 @@ import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.loader.plan.exec.query.spi.NamedParameterContext;
import org.hibernate.loader.plan.exec.spi.LoadQueryDetails;
import org.hibernate.loader.spi.AfterLoadAction;
import org.hibernate.resource.jdbc.ResourceRegistry;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.type.Type;

@@ -138,13 +136,6 @@ public abstract class AbstractLoadPlanBasedLoader {
}
}

protected SqlStatementWrapper executeQueryStatement(
final QueryParameters queryParameters,
final boolean scroll,
final SharedSessionContractImplementor session) throws SQLException {
return executeQueryStatement( getStaticLoadQuery().getSqlStatement(), queryParameters, scroll, session );
}

protected SqlStatementWrapper executeQueryStatement(
String sqlStatement,
QueryParameters queryParameters,


@@ -63,10 +63,6 @@ public class ResultSetProcessorImpl implements ResultSetProcessor {
this.hadSubselectFetches = hadSubselectFetches;
}

public RowReader getRowReader() {
return rowReader;
}

@Override
public ScrollableResultSetProcessor toOnDemandForm() {
// todo : implement

@@ -86,13 +82,20 @@ public class ResultSetProcessorImpl implements ResultSetProcessor {

handlePotentiallyEmptyCollectionRootReturns( loadPlan, queryParameters.getCollectionKeys(), resultSet, session );

final boolean traceEnabled = LOG.isTraceEnabled();
final int maxRows;
final List loadResults;
final RowSelection selection = queryParameters.getRowSelection();
if ( LimitHelper.hasMaxRows( selection ) ) {
maxRows = selection.getMaxRows();
LOG.tracef( "Limiting ResultSet processing to just %s rows", maxRows );
if ( traceEnabled ) {
LOG.tracef( "Limiting ResultSet processing to just %s rows", maxRows );
}
int sizeHint = maxRows < 50 ? maxRows : 50;
loadResults = new ArrayList( sizeHint );
}
else {
loadResults = new ArrayList();
maxRows = Integer.MAX_VALUE;
}

@@ -113,12 +116,14 @@ public class ResultSetProcessorImpl implements ResultSetProcessor {
hadSubselectFetches
);

final List loadResults = new ArrayList();

LOG.trace( "Processing result set" );
if ( traceEnabled ) {
LOG.trace( "Processing result set" );
}
int count;
for ( count = 0; count < maxRows && resultSet.next(); count++ ) {
LOG.debugf( "Starting ResultSet row #%s", count );
if ( traceEnabled ) {
LOG.tracef( "Starting ResultSet row #%s", count );
}

Object logicalRow = rowReader.readRow( resultSet, context );

@@ -129,7 +134,9 @@ public class ResultSetProcessorImpl implements ResultSetProcessor {
context.finishUpRow();
}

LOG.tracev( "Done processing result set ({0} rows)", count );
if ( traceEnabled ) {
LOG.tracev( "Done processing result set ({0} rows)", count );
}

rowReader.finishUp( context, afterLoadActionList );
context.wrapUp();
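The ResultSetProcessorImpl changes follow one rule: read isTraceEnabled() once, outside the row loop, and guard each trace call with the cached flag, so a disabled logger costs one boolean per result set instead of a check (and possible formatting) per row. A self-contained sketch of the shape (logger and loop body are schematic, not the class's actual code):

import java.sql.ResultSet;
import java.sql.SQLException;
import org.jboss.logging.Logger;

class TraceGuardSketch {
	private static final Logger LOG = Logger.getLogger( TraceGuardSketch.class );

	int process(ResultSet resultSet, int maxRows) throws SQLException {
		final boolean traceEnabled = LOG.isTraceEnabled(); // hoisted once per result set
		int count = 0;
		for ( ; count < maxRows && resultSet.next(); count++ ) {
			if ( traceEnabled ) {
				LOG.tracef( "Starting ResultSet row #%s", count );
			}
			// ... read the row ...
		}
		if ( traceEnabled ) {
			LOG.tracev( "Done processing result set ({0} rows)", count );
		}
		return count;
	}
}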
@@ -1915,8 +1915,9 @@ public abstract class AbstractCollectionPersister
}

String[] result = new String[rawAliases.length];
final Alias alias = new Alias( suffix );
for ( int i = 0; i < rawAliases.length; i++ ) {
result[i] = new Alias( suffix ).toUnquotedAliasString( rawAliases[i] );
result[i] = alias.toUnquotedAliasString( rawAliases[i] );
}
return result;
}
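This one-line change hoists a loop-invariant allocation: the Alias instance does not depend on the loop variable, so one object can serve every iteration. The same shape, reduced to essentials (Alias is stubbed here to keep the sketch self-contained):

final class AliasHoistingSketch {
	static final class Alias {
		private final String suffix;
		Alias(String suffix) { this.suffix = suffix; }
		String toUnquotedAliasString(String raw) { return raw + suffix; }
	}

	static String[] aliases(String[] rawAliases, String suffix) {
		final Alias alias = new Alias( suffix ); // hoisted: was allocated per iteration
		String[] result = new String[rawAliases.length];
		for ( int i = 0; i < rawAliases.length; i++ ) {
			result[i] = alias.toUnquotedAliasString( rawAliases[i] );
		}
		return result;
	}
}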
@@ -533,6 +533,14 @@ public abstract class AbstractEntityPersister
return propertySelectable;
}

public String[] getTableNames() {
String[] tableNames = new String[getTableSpan()];
for ( int i = 0; i < tableNames.length; i++ ) {
tableNames[i] = getTableName( i );
}
return tableNames;
}

@SuppressWarnings("UnnecessaryBoxing")
public AbstractEntityPersister(
final PersistentClass persistentClass,

@@ -560,7 +568,7 @@ public abstract class AbstractEntityPersister
this.naturalIdRegionAccessStrategy = null;
}

this.entityMetamodel = new EntityMetamodel( persistentClass, this, creationContext );
this.entityMetamodel = new EntityMetamodel( persistentClass, this, factory );
this.entityTuplizer = this.entityMetamodel.getTuplizer();

if ( entityMetamodel.isMutable() ) {

@@ -696,14 +704,7 @@ public abstract class AbstractEntityPersister
final boolean lazy = ! EnhancementHelper.includeInBaseFetchGroup(
prop,
entityMetamodel.isInstrumented(),
creationContext.getSessionFactory().getSessionFactoryOptions().isEnhancementAsProxyEnabled(),
associatedEntityName -> {
final PersistentClass bootEntityDescriptor = creationContext.getMetadata().getEntityBinding( associatedEntityName );
if ( bootEntityDescriptor == null ) {
return false;
}
return bootEntityDescriptor.hasSubclasses();
}
creationContext.getSessionFactory().getSessionFactoryOptions().isEnhancementAsProxyEnabled()
);

if ( lazy ) {

@@ -779,14 +780,7 @@ public abstract class AbstractEntityPersister
final boolean lazy = ! EnhancementHelper.includeInBaseFetchGroup(
prop,
entityMetamodel.isInstrumented(),
creationContext.getSessionFactory().getSessionFactoryOptions().isEnhancementAsProxyEnabled(),
associatedEntityName -> {
final PersistentClass bootEntityDescriptor = creationContext.getMetadata().getEntityBinding( associatedEntityName );
if ( bootEntityDescriptor == null ) {
return false;
}
return bootEntityDescriptor.hasSubclasses();
}
creationContext.getSessionFactory().getSessionFactoryOptions().isEnhancementAsProxyEnabled()
);
while ( colIter.hasNext() ) {
Selectable thing = (Selectable) colIter.next();

@@ -3890,7 +3884,8 @@ public abstract class AbstractEntityPersister
alias,
innerJoin,
includeSubclasses,
Collections.emptySet()
Collections.emptySet(),
null
).toFromFragmentString();
}

@@ -3903,7 +3898,19 @@ public abstract class AbstractEntityPersister
// NOTE : Not calling createJoin here is just a performance optimization
return getSubclassTableSpan() == 1
? ""
: createJoin( alias, innerJoin, includeSubclasses, treatAsDeclarations ).toFromFragmentString();
: createJoin( alias, innerJoin, includeSubclasses, treatAsDeclarations, null ).toFromFragmentString();
}

@Override
public String fromJoinFragment(
String alias,
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations,
Set<String> referencedTables) {
return getSubclassTableSpan() == 1
? ""
: createJoin( alias, innerJoin, includeSubclasses, treatAsDeclarations, referencedTables ).toFromFragmentString();
}

@Override

@@ -3915,7 +3922,8 @@ public abstract class AbstractEntityPersister
alias,
innerJoin,
includeSubclasses,
Collections.emptySet()
Collections.emptySet(),
null
).toWhereFragmentString();
}

@@ -3928,7 +3936,7 @@ public abstract class AbstractEntityPersister
// NOTE : Not calling createJoin here is just a performance optimization
return getSubclassTableSpan() == 1
? ""
: createJoin( alias, innerJoin, includeSubclasses, treatAsDeclarations ).toWhereFragmentString();
: createJoin( alias, innerJoin, includeSubclasses, treatAsDeclarations, null ).toWhereFragmentString();
}

protected boolean isSubclassTableLazy(int j) {

@@ -3940,6 +3948,15 @@ public abstract class AbstractEntityPersister
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations) {
return createJoin(name, innerJoin, includeSubclasses, treatAsDeclarations, null);
}

protected JoinFragment createJoin(
String name,
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations,
Set<String> referencedTables) {
// IMPL NOTE : all joins join to the pk of the driving table
final String[] idCols = StringHelper.qualify( name, getIdentifierColumnNames() );
final JoinFragment join = getFactory().getDialect().createOuterJoinFragment();

@@ -3950,7 +3967,8 @@ public abstract class AbstractEntityPersister
j,
innerJoin,
includeSubclasses,
treatAsDeclarations
treatAsDeclarations,
referencedTables
);

if ( joinType != null && joinType != JoinType.NONE ) {

@@ -3971,8 +3989,28 @@ public abstract class AbstractEntityPersister
boolean canInnerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations) {
return determineSubclassTableJoinType(
subclassTableNumber,
canInnerJoin,
includeSubclasses,
treatAsDeclarations,
null
);
}

protected JoinType determineSubclassTableJoinType(
int subclassTableNumber,
boolean canInnerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations,
Set<String> referencedTables) {

if ( isClassOrSuperclassTable( subclassTableNumber ) ) {
String superclassTableName = getSubclassTableName( subclassTableNumber );
if ( referencedTables != null && canOmitSuperclassTableJoin() && !referencedTables.contains(
superclassTableName ) ) {
return JoinType.NONE;
}
final boolean shouldInnerJoin = canInnerJoin
&& !isInverseTable( subclassTableNumber )
&& !isNullableTable( subclassTableNumber );

@@ -5212,8 +5250,8 @@ public abstract class AbstractEntityPersister
if ( attribute.getType() instanceof ComponentType ) {
final ComponentType type = (ComponentType) attribute.getType();
final ValueGeneration[] propertyValueGenerationStrategies = type.getPropertyValueGenerationStrategies();
for ( int i = 0; i < propertyValueGenerationStrategies.length; i++ ) {
if ( isReadRequired( propertyValueGenerationStrategies[i], matchTiming ) ) {
for ( ValueGeneration propertyValueGenerationStrategie : propertyValueGenerationStrategies ) {
if ( isReadRequired( propertyValueGenerationStrategie, matchTiming ) ) {
return true;
}
}

@@ -5735,6 +5773,15 @@ public abstract class AbstractEntityPersister
return ArrayHelper.to2DStringArray( polymorphicJoinColumns );
}

/**
* If true, persister can omit superclass tables during joining if they are not needed in the query.
*
* @return true if the persister can do it
*/
public boolean canOmitSuperclassTableJoin() {
return false;
}

private void prepareEntityIdentifierDefinition() {
if ( entityIdentifierDefinition != null ) {
return;


@@ -57,6 +57,19 @@ public interface Joinable {
*/
public String fromJoinFragment(String alias, boolean innerJoin, boolean includeSubclasses, Set<String> treatAsDeclarations);

/**
* Get the from clause part of any joins
* (optional operation)
*/
default String fromJoinFragment(
String alias,
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations,
Set<String> referencedTables) {
return fromJoinFragment( alias, innerJoin, includeSubclasses, treatAsDeclarations );
}

/**
* The columns to join on
*/
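Joinable grows a referencedTables parameter without breaking third-party implementors: the new overload is a default method that ignores the extra argument and forwards to the old signature, so existing classes compile unchanged while the core persisters override it. The mechanism in isolation:

import java.util.Set;

interface JoinFragmentSource {
	String fromJoinFragment(String alias, boolean innerJoin, boolean includeSubclasses, Set<String> treatAs);

	// New capability, added compatibly: old implementors inherit this forwarding default.
	default String fromJoinFragment(
			String alias,
			boolean innerJoin,
			boolean includeSubclasses,
			Set<String> treatAs,
			Set<String> referencedTables) {
		return fromJoinFragment( alias, innerJoin, includeSubclasses, treatAs );
	}
}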
@@ -1053,6 +1053,7 @@ public class JoinedSubclassEntityPersister extends AbstractEntityPersister {
return subclassNamesBySubclassTable[index];
}

@Override
public String getPropertyTableName(String propertyName) {
Integer index = getEntityMetamodel().getPropertyIndexOrNull( propertyName );
if ( index == null ) {

@@ -1118,4 +1119,9 @@ public class JoinedSubclassEntityPersister extends AbstractEntityPersister {
public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {
return new DynamicFilterAliasGenerator(subclassTableNameClosure, rootAlias);
}

@Override
public boolean canOmitSuperclassTableJoin() {
return true;
}
}


@@ -809,6 +809,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
return isNullableSubclassTable[j];
}

@Override
public String getPropertyTableName(String propertyName) {
Integer index = getEntityMetamodel().getPropertyIndexOrNull( propertyName );
if ( index == null ) {


@@ -466,6 +466,7 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
return true;
}

@Override
public String getPropertyTableName(String propertyName) {
//TODO: check this....
return getTableName();

@@ -483,4 +484,5 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {
return new StaticFilterAliasGenerator( rootAlias );
}

}
@@ -48,20 +48,17 @@ public class LogicalConnectionManagedImpl extends AbstractLogicalConnectionImple
 	public LogicalConnectionManagedImpl(
 			JdbcConnectionAccess jdbcConnectionAccess,
 			JdbcSessionContext jdbcSessionContext,
-			ResourceRegistry resourceRegistry) {
+			ResourceRegistry resourceRegistry,
+			JdbcServices jdbcServices) {
 		this.jdbcConnectionAccess = jdbcConnectionAccess;
 		this.observer = jdbcSessionContext.getObserver();
 		this.resourceRegistry = resourceRegistry;
 
 		this.connectionHandlingMode = determineConnectionHandlingMode(
 				jdbcSessionContext.getPhysicalConnectionHandlingMode(),
-				jdbcConnectionAccess
-
-		);
+				jdbcConnectionAccess );
 
-		this.sqlExceptionHelper = jdbcSessionContext.getServiceRegistry()
-				.getService( JdbcServices.class )
-				.getSqlExceptionHelper();
+		this.sqlExceptionHelper = jdbcServices.getSqlExceptionHelper();
 
 		if ( connectionHandlingMode.getAcquisitionMode() == ConnectionAcquisitionMode.IMMEDIATELY ) {
 			acquireConnectionIfNeeded();

@@ -94,14 +91,15 @@ public class LogicalConnectionManagedImpl extends AbstractLogicalConnectionImple
 			JdbcConnectionAccess jdbcConnectionAccess,
 			JdbcSessionContext jdbcSessionContext,
 			boolean closed) {
-		this( jdbcConnectionAccess, jdbcSessionContext, new ResourceRegistryStandardImpl() );
+		this( jdbcConnectionAccess, jdbcSessionContext, new ResourceRegistryStandardImpl(),
+				jdbcSessionContext.getServiceRegistry().getService( JdbcServices.class )
+		);
 		this.closed = closed;
 	}
 
 	private Connection acquireConnectionIfNeeded() {
 		if ( physicalConnection == null ) {
 			// todo : is this the right place for these observer calls?
 			observer.jdbcConnectionAcquisitionStart();
 			try {
 				physicalConnection = jdbcConnectionAccess.obtainConnection();
 			}

@@ -187,8 +185,6 @@ public class LogicalConnectionManagedImpl extends AbstractLogicalConnectionImple
 			return;
 		}
 
-		// todo : is this the right place for these observer calls?
-		observer.jdbcConnectionReleaseStart();
 		try {
 			if ( !physicalConnection.isClosed() ) {
 				sqlExceptionHelper.logAndClearWarnings( physicalConnection );

@@ -221,7 +217,7 @@ public class LogicalConnectionManagedImpl extends AbstractLogicalConnectionImple
 	public static LogicalConnectionManagedImpl deserialize(
 			ObjectInputStream ois,
 			JdbcConnectionAccess jdbcConnectionAccess,
-			JdbcSessionContext jdbcSessionContext) throws IOException, ClassNotFoundException {
+			JdbcSessionContext jdbcSessionContext) throws IOException {
 		final boolean isClosed = ois.readBoolean();
 		return new LogicalConnectionManagedImpl( jdbcConnectionAccess, jdbcSessionContext, isClosed );
 	}
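The constructor change above swaps a per-instance ServiceRegistry lookup for a dependency handed in by the caller. A sketch of the before/after shape — the ConnectionHelper class is invented; JdbcServices and SqlExceptionHelper are the real Hibernate types:

import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;

class ConnectionHelper {
	private final SqlExceptionHelper sqlExceptionHelper;

	// Before: this class would have called
	//   jdbcSessionContext.getServiceRegistry().getService( JdbcServices.class )
	// itself. After: the caller resolves JdbcServices once and passes it in,
	// so constructing many of these objects performs no registry lookups.
	ConnectionHelper(JdbcServices jdbcServices) {
		this.sqlExceptionHelper = jdbcServices.getSqlExceptionHelper();
	}
}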
@@ -13,13 +13,7 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
 
 import org.hibernate.HibernateException;
 import org.hibernate.JDBCException;
@@ -29,19 +23,40 @@ import org.hibernate.resource.jdbc.ResourceRegistry;
 import org.hibernate.resource.jdbc.spi.JdbcObserver;
 
 /**
  * Helps to track statements and resultsets which need being closed.
  * This class is not threadsafe.
  *
+ * Note regarding performance: we had evidence that allocating Iterators
+ * to implement the cleanup on each element recursively was the dominant
+ * resource cost, so we decided using "forEach" and lambdas in this case.
+ * However the forEach/lambda combination is able to dodge allocating
+ * Iterators on HashMap and ArrayList, but not on HashSet (at least on JDK8 and 11).
+ * Therefore some types which should ideally be modelled as a Set have
+ * been implemented using HashMap.
+ *
  * @author Steve Ebersole
+ * @author Sanne Grinovero
  */
-public class ResourceRegistryStandardImpl implements ResourceRegistry {
+public final class ResourceRegistryStandardImpl implements ResourceRegistry {
 
 	private static final CoreMessageLogger log = CoreLogging.messageLogger( ResourceRegistryStandardImpl.class );
 
+	// Dummy value to associate with an Object in the backing Map when we use it as a set:
+	private static final Object PRESENT = new Object();
+
+	//Used instead of Collections.EMPTY_SET to avoid polymorhic calls on xref;
+	//Also, uses an HashMap as it were an HashSet, as technically we just need the Set semantics
+	//but in this case the overhead of HashSet is not negligible.
+	private static final HashMap<ResultSet,Object> EMPTY = new HashMap<ResultSet,Object>( 1, 0.2f );
+
 	private final JdbcObserver jdbcObserver;
 
-	private final Map<Statement, Set<ResultSet>> xref = new HashMap<Statement, Set<ResultSet>>();
-	private final Set<ResultSet> unassociatedResultSets = new HashSet<ResultSet>();
+	private final HashMap<Statement, HashMap<ResultSet,Object>> xref = new HashMap<>();
+	private final HashMap<ResultSet,Object> unassociatedResultSets = new HashMap<ResultSet,Object>();
 
-	private List<Blob> blobs;
-	private List<Clob> clobs;
-	private List<NClob> nclobs;
+	private ArrayList<Blob> blobs;
+	private ArrayList<Clob> clobs;
+	private ArrayList<NClob> nclobs;
 
 	private Statement lastQuery;
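The PRESENT sentinel reproduces in the open what java.util.HashSet does internally (a HashSet is a HashMap whose values all point at one shared dummy object). A self-contained sketch of the idiom, with invented names:

import java.util.HashMap;

class StringTracker {
	private static final Object PRESENT = new Object();
	private final HashMap<String, Object> elements = new HashMap<>();

	void add(String s) {
		elements.put( s, PRESENT );            // equivalent of Set.add
	}

	boolean remove(String s) {
		return elements.remove( s ) != null;   // equivalent of Set.remove
	}

	void drain() {
		// HashMap.forEach walks the internal table without allocating an
		// Iterator, which is the point of substituting HashMap for HashSet.
		elements.forEach( (s, ignored) -> System.out.println( s ) );
		elements.clear();
	}
}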
@@ -67,7 +82,7 @@ public class ResourceRegistryStandardImpl implements ResourceRegistry {
 	public void register(Statement statement, boolean cancelable) {
 		log.tracef( "Registering statement [%s]", statement );
 
-		Set<ResultSet> previousValue = xref.putIfAbsent( statement, Collections.EMPTY_SET );
+		HashMap<ResultSet,Object> previousValue = xref.putIfAbsent( statement, EMPTY );
 		if ( previousValue != null ) {
 			throw new HibernateException( "JDBC Statement already registered" );
 		}

@@ -81,7 +96,7 @@ public class ResourceRegistryStandardImpl implements ResourceRegistry {
 	public void release(Statement statement) {
 		log.tracev( "Releasing statement [{0}]", statement );
 
-		final Set<ResultSet> resultSets = xref.remove( statement );
+		final HashMap<ResultSet,Object> resultSets = xref.remove( statement );
 		if ( resultSets != null ) {
 			closeAll( resultSets );
 		}

@@ -111,7 +126,7 @@ public class ResourceRegistryStandardImpl implements ResourceRegistry {
 			}
 		}
 		if ( statement != null ) {
-			final Set<ResultSet> resultSets = xref.get( statement );
+			final HashMap<ResultSet,Object> resultSets = xref.get( statement );
 			if ( resultSets == null ) {
 				log.unregisteredStatement();
 			}
@@ -123,24 +138,26 @@ public class ResourceRegistryStandardImpl implements ResourceRegistry {
 			}
 		}
 		else {
-			final boolean removed = unassociatedResultSets.remove( resultSet );
-			if ( !removed ) {
+			final Object removed = unassociatedResultSets.remove( resultSet );
+			if ( removed == null ) {
 				log.unregisteredResultSetWithoutStatement();
 			}
 
 		}
 		close( resultSet );
 	}
 
-	protected void closeAll(Set<ResultSet> resultSets) {
-		for ( ResultSet resultSet : resultSets ) {
-			close( resultSet );
-		}
+	private static void closeAll(final HashMap<ResultSet,Object> resultSets) {
+		resultSets.forEach( (resultSet, o) -> close( resultSet ) );
+		resultSets.clear();
 	}
 
+	private static void releaseXref(final Statement s, final HashMap<ResultSet, Object> r) {
+		closeAll( r );
+		close( s );
+	}
+
-	@SuppressWarnings({"unchecked"})
-	public static void close(ResultSet resultSet) {
+	private static void close(final ResultSet resultSet) {
 		log.tracef( "Closing result set [%s]", resultSet );
 
 		try {
@@ -202,7 +219,7 @@ public class ResourceRegistryStandardImpl implements ResourceRegistry {
 			}
 		}
 		if ( statement != null ) {
-			Set<ResultSet> resultSets = xref.get( statement );
+			HashMap<ResultSet,Object> resultSets = xref.get( statement );
 
 			// Keep this at DEBUG level, rather than warn. Numerous connection pool implementations can return a
 			// proxy/wrapper around the JDBC Statement, causing excessive logging here. See HHH-8210.

@@ -210,14 +227,14 @@ public class ResourceRegistryStandardImpl implements ResourceRegistry {
 				log.debug( "ResultSet statement was not registered (on register)" );
 			}
 
-			if ( resultSets == null || resultSets == Collections.EMPTY_SET ) {
-				resultSets = new HashSet<ResultSet>();
+			if ( resultSets == null || resultSets == EMPTY ) {
+				resultSets = new HashMap<ResultSet,Object>();
 				xref.put( statement, resultSets );
 			}
-			resultSets.add( resultSet );
+			resultSets.put( resultSet, PRESENT );
 		}
 		else {
-			unassociatedResultSets.add( resultSet );
+			unassociatedResultSets.put( resultSet, PRESENT );
 		}
 	}
 
@@ -303,62 +320,54 @@ public class ResourceRegistryStandardImpl implements ResourceRegistry {
 			jdbcObserver.jdbcReleaseRegistryResourcesStart();
 		}
 
-		for ( Map.Entry<Statement, Set<ResultSet>> entry : xref.entrySet() ) {
-			if ( entry.getValue() != null ) {
-				closeAll( entry.getValue() );
-			}
-			close( entry.getKey() );
-		}
+		xref.forEach( ResourceRegistryStandardImpl::releaseXref );
 		xref.clear();
 
 		closeAll( unassociatedResultSets );
 
 		if ( blobs != null ) {
-			for ( Blob blob : blobs ) {
+			blobs.forEach( blob -> {
 				try {
 					blob.free();
 				}
 				catch (SQLException e) {
 					log.debugf( "Unable to free JDBC Blob reference [%s]", e.getMessage() );
 				}
-			}
-			blobs.clear();
+			} );
+			//for these, it seems better to null the map rather than clear it:
+			blobs = null;
 		}
 
 		if ( clobs != null ) {
-			for ( Clob clob : clobs ) {
+			clobs.forEach( clob -> {
 				try {
 					clob.free();
 				}
 				catch (SQLException e) {
 					log.debugf( "Unable to free JDBC Clob reference [%s]", e.getMessage() );
 				}
-			}
-			clobs.clear();
+			} );
+			clobs = null;
 		}
 
 		if ( nclobs != null ) {
-			for ( NClob nclob : nclobs ) {
+			nclobs.forEach( nclob -> {
 				try {
 					nclob.free();
 				}
 				catch (SQLException e) {
 					log.debugf( "Unable to free JDBC NClob reference [%s]", e.getMessage() );
 				}
-			}
-			nclobs.clear();
-		}
-
-		if ( jdbcObserver != null ) {
-			jdbcObserver.jdbcReleaseRegistryResourcesEnd();
+			} );
+			nclobs = null;
 		}
 	}
 
-	private boolean hasRegistered(Map resource) {
+	private boolean hasRegistered(final HashMap resource) {
 		return resource != null && !resource.isEmpty();
 	}
 
-	private boolean hasRegistered(Collection resource) {
+	private boolean hasRegistered(final ArrayList resource) {
 		return resource != null && !resource.isEmpty();
 	}
 }
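The line xref.forEach( ResourceRegistryStandardImpl::releaseXref ) works because HashMap.forEach accepts a BiConsumer over the key and value types, and any static two-argument method with matching parameters can stand in for one. A runnable sketch with invented names:

import java.util.HashMap;

public class ForEachDemo {
	// Same shape as releaseXref above: static, two arguments typed like the
	// map's key and value, so it satisfies BiConsumer<String, Integer>.
	private static void releaseEntry(String name, Integer handle) {
		System.out.println( "releasing " + name + " -> " + handle );
	}

	public static void main(String[] args) {
		HashMap<String, Integer> handles = new HashMap<>();
		handles.put( "stmt-1", 1 );
		handles.put( "stmt-2", 2 );
		// No Iterator is allocated: forEach walks the map's internal table.
		handles.forEach( ForEachDemo::releaseEntry );
		handles.clear();
	}
}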
@@ -9,8 +9,11 @@ package org.hibernate.resource.jdbc.spi;
 import java.sql.Connection;
 
 /**
+ * @deprecated It is no longer possible to plug custom implementations of
+ * this SPI. It will be removed.
  * @author Steve Ebersole
  */
+@Deprecated
 public interface JdbcObserver {
 	public void jdbcConnectionAcquisitionStart();
 	public void jdbcConnectionAcquisitionEnd(Connection connection);
@@ -7,6 +7,7 @@
 package org.hibernate.resource.transaction.backend.jdbc.internal;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import javax.persistence.RollbackException;
 import javax.transaction.Status;

@@ -50,7 +51,7 @@ public class JdbcResourceLocalTransactionCoordinatorImpl implements TransactionC
 
 	private int timeOut = -1;
 
-	private final transient List<TransactionObserver> observers;
+	private transient List<TransactionObserver> observers = null;
 
 	/**
 	 * Construct a ResourceLocalTransactionCoordinatorImpl instance. package-protected to ensure access goes through
 
@@ -62,7 +63,6 @@ public class JdbcResourceLocalTransactionCoordinatorImpl implements TransactionC
 			TransactionCoordinatorBuilder transactionCoordinatorBuilder,
 			TransactionCoordinatorOwner owner,
 			JdbcResourceTransactionAccess jdbcResourceTransactionAccess) {
-		this.observers = new ArrayList<>();
 		this.transactionCoordinatorBuilder = transactionCoordinatorBuilder;
 		this.jdbcResourceTransactionAccess = jdbcResourceTransactionAccess;
 		this.transactionCoordinatorOwner = owner;

@@ -81,7 +81,12 @@ public class JdbcResourceLocalTransactionCoordinatorImpl implements TransactionC
 	 * @return TransactionObserver
 	 */
 	private Iterable<TransactionObserver> observers() {
-		return new ArrayList<>( observers );
+		if ( observers == null || observers.isEmpty() ) {
+			return Collections.emptyList();
+		}
+		else {
+			return new ArrayList<>( observers );
+		}
 	}
 
 	@Override

@@ -203,12 +208,17 @@ public class JdbcResourceLocalTransactionCoordinatorImpl implements TransactionC
 
 	@Override
 	public void addObserver(TransactionObserver observer) {
+		if ( observers == null ) {
+			observers = new ArrayList<>( 6 );
+		}
 		observers.add( observer );
 	}
 
 	@Override
 	public void removeObserver(TransactionObserver observer) {
-		observers.remove( observer );
+		if ( observers != null ) {
+			observers.remove( observer );
+		}
 	}
 
 	/**
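This coordinator (and JtaTransactionCoordinatorImpl just below) now applies two ideas: allocate the observers list only on the first addObserver call, and have observers() return a defensive copy — or a shared empty list — so a callback may remove itself mid-iteration without a ConcurrentModificationException. A condensed, self-contained sketch of that pattern with invented names:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class ObserverSupport {
	private List<Runnable> observers = null;   // lazily allocated: often never needed

	void addObserver(Runnable observer) {
		if ( observers == null ) {
			observers = new ArrayList<>( 6 );  // these lists stay very small
		}
		observers.add( observer );
	}

	void removeObserver(Runnable observer) {
		if ( observers != null ) {
			observers.remove( observer );
		}
	}

	Iterable<Runnable> observers() {
		if ( observers == null || observers.isEmpty() ) {
			return Collections.emptyList();    // common case: no allocation at all
		}
		// Copy so an observer may call removeObserver while we iterate.
		return new ArrayList<>( observers );
	}

	void fire() {
		for ( Runnable observer : observers() ) {
			observer.run();
		}
	}
}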
@@ -62,7 +62,7 @@ public class JtaTransactionCoordinatorImpl implements TransactionCoordinator, Sy
 
 	private int timeOut = -1;
 
-	private final transient List<TransactionObserver> observers;
+	private transient List<TransactionObserver> observers = null;
 
 	/**
 	 * Construct a JtaTransactionCoordinatorImpl instance. package-protected to ensure access goes through

@@ -79,8 +79,6 @@ public class JtaTransactionCoordinatorImpl implements TransactionCoordinator, Sy
 		this.transactionCoordinatorOwner = owner;
 		this.autoJoinTransactions = autoJoinTransactions;
 
-		this.observers = new ArrayList<>();
-
 		final JdbcSessionContext jdbcSessionContext = owner.getJdbcSessionOwner().getJdbcSessionContext();
 
 		this.jtaPlatform = jdbcSessionContext.getServiceRegistry().getService( JtaPlatform.class );

@@ -109,9 +107,8 @@ public class JtaTransactionCoordinatorImpl implements TransactionCoordinator, Sy
 		this.preferUserTransactions = preferUserTransactions;
 		this.performJtaThreadTracking = performJtaThreadTracking;
 
-		this.observers = new ArrayList<>();
-
 		if ( observers != null ) {
+			this.observers = new ArrayList<>( observers.length );
 			Collections.addAll( this.observers, observers );
 		}
 

@@ -123,11 +120,17 @@ public class JtaTransactionCoordinatorImpl implements TransactionCoordinator, Sy
 	/**
 	 * Needed because while iterating the observers list and executing the before/update callbacks,
 	 * some observers might get removed from the list.
+	 * Yet try to not allocate anything for when the list is empty, as this is a common case.
 	 *
 	 * @return TransactionObserver
 	 */
 	private Iterable<TransactionObserver> observers() {
-		return new ArrayList<>( observers );
+		if ( this.observers == null ) {
+			return Collections.EMPTY_LIST;
+		}
+		else {
+			return new ArrayList<>( this.observers );
+		}
 	}
 
 	public SynchronizationCallbackCoordinator getSynchronizationCallbackCoordinator() {

@@ -388,12 +391,17 @@ public class JtaTransactionCoordinatorImpl implements TransactionCoordinator, Sy
 	}
 
 	public void addObserver(TransactionObserver observer) {
+		if ( this.observers == null ) {
+			this.observers = new ArrayList<>( 3 ); //These lists are typically very small.
+		}
 		observers.add( observer );
 	}
 
 	@Override
 	public void removeObserver(TransactionObserver observer) {
-		observers.remove( observer );
+		if ( observers != null ) {
+			observers.remove( observer );
+		}
 	}
 
 	/**
@@ -38,12 +38,12 @@ public class Insert {
 	}
 
 	public Insert addColumn(String columnName) {
-		return addColumn(columnName, "?");
+		return addColumn( columnName, "?" );
 	}
 
 	public Insert addColumns(String[] columnNames) {
-		for ( int i=0; i<columnNames.length; i++ ) {
-			addColumn( columnNames[i] );
+		for ( String columnName : columnNames ) {
+			addColumn( columnName );
 		}
 		return this;
 	}

@@ -67,12 +67,12 @@ public class Insert {
 	}
 
 	public Insert addColumn(String columnName, String valueExpression) {
-		columns.put(columnName, valueExpression);
+		columns.put( columnName, valueExpression );
 		return this;
 	}
 
 	public Insert addColumn(String columnName, Object value, LiteralType type) throws Exception {
-		return addColumn( columnName, type.objectToSQLString(value, dialect) );
+		return addColumn( columnName, type.objectToSQLString( value, dialect ) );
 	}
 
 	public Insert addIdentityColumn(String columnName) {

@@ -44,8 +44,8 @@ public class InsertSelect {
 	}
 
 	public InsertSelect addColumns(String[] columnNames) {
-		for ( int i = 0; i < columnNames.length; i++ ) {
-			this.columnNames.add( columnNames[i] );
+		for ( String columnName : columnNames ) {
+			this.columnNames.add( columnName );
 		}
 		return this;
 	}
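For orientation, a usage sketch of the org.hibernate.sql.Insert builder touched above. This commit only reformats it and modernizes the loops, so behavior is unchanged; the SQL text in the comment is indicative of what toStatementString() produces, not output captured from this commit.

import org.hibernate.dialect.H2Dialect;
import org.hibernate.sql.Insert;

public class InsertDemo {
	public static void main(String[] args) {
		String sql = new Insert( new H2Dialect() )
				.setTableName( "person" )
				.addColumns( new String[] { "id", "name" } )
				.toStatementString();
		// Roughly: insert into person (id, name) values (?, ?)
		System.out.println( sql );
	}
}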