HHH-16441 - Improve support for @BatchSize

HHH-16466 - ARRAY parameter support for multi-key loads
HHH-16509 - Split parameter limit and IN element limit

(cherry picked from commit 25a23fd1c0)
Steve Ebersole 2023-04-05 19:16:49 -05:00
parent 3f1b1f45c5
commit 1ce3e8eaab
92 changed files with 3908 additions and 1006 deletions

View File

@ -204,3 +204,19 @@ The following table illustrates a list of commands for various databases that ca
|`./docker_db.sh cockroachdb`
|`./gradlew test -Pdb=cockroachdb`
|===
To stop a container started by `docker`, use the command
[source]
----
docker stop $container_name
----
NOTE:: Substitute the `podman` command for `docker` if using `podman`
E.g., to stop the mariadb container:
[source]
----
docker stop mariadb
----

View File

@ -628,6 +628,14 @@ public class DerbyDialect extends Dialect {
return false;
}
@Override
public int getInExpressionCountLimit() {
// Derby does not appear to impose a limit on the number of expressions/parameters per se.
// It does, however, limit the size of the SQL text it will accept as a
// PreparedStatement, so cap this at a sensible value to stay under that limit.
return 512;
}
@Override
public SQLExceptionConversionDelegate buildSQLExceptionConversionDelegate() {
return (sqlException, message, sql) -> {
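Reviewer note on the Derby change: the new `getParameterCountLimit()` hook (introduced on `Dialect` below) defaults to `getInExpressionCountLimit()`, so a dialect only overrides it when the two limits genuinely differ. A minimal, hypothetical sketch of such an override; the class name and value are illustrative and not part of this commit:
[source,java]
----
import org.hibernate.dialect.DerbyDialect;

// Hypothetical dialect: keep Derby's conservative IN-list cap from above,
// but report an even smaller cap for the total JDBC parameter count.
public class ConservativeDerbyDialect extends DerbyDialect {
	@Override
	public int getParameterCountLimit() {
		return 256; // illustrative value only
	}
}
----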

View File

@ -109,7 +109,7 @@ import org.hibernate.internal.util.MathHelper;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.io.StreamCopier;
- import org.hibernate.loader.BatchLoadSizingStrategy;
import org.hibernate.loader.ast.spi.MultiKeyLoadSizingStrategy;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.ForeignKey;
@ -178,9 +178,6 @@ import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
- import org.hibernate.type.descriptor.jdbc.TimeUtcAsOffsetTimeJdbcType;
- import org.hibernate.type.descriptor.jdbc.TimestampUtcAsJdbcTimestampJdbcType;
- import org.hibernate.type.descriptor.jdbc.TimestampUtcAsOffsetDateTimeJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.LongNVarcharJdbcType;
@ -188,6 +185,9 @@ import org.hibernate.type.descriptor.jdbc.NCharJdbcType;
import org.hibernate.type.descriptor.jdbc.NClobJdbcType;
import org.hibernate.type.descriptor.jdbc.NVarcharJdbcType;
import org.hibernate.type.descriptor.jdbc.TimeUtcAsJdbcTimeJdbcType;
import org.hibernate.type.descriptor.jdbc.TimeUtcAsOffsetTimeJdbcType;
import org.hibernate.type.descriptor.jdbc.TimestampUtcAsJdbcTimestampJdbcType;
import org.hibernate.type.descriptor.jdbc.TimestampUtcAsOffsetDateTimeJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.CapacityDependentDdlType;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
@ -3797,6 +3797,18 @@ public abstract class Dialect implements ConversionContext, TypeContributor, Fun
return 0;
}
/**
* Return the limit that the underlying database places on the number of parameters
* that can be defined for a PreparedStatement. If the database defines no such
* limits, simply return zero or a number smaller than zero. By default, Dialect
* returns the same value as {@link #getInExpressionCountLimit()}.
*
* @return The limit, or a non-positive integer to indicate no limit.
*/
public int getParameterCountLimit() {
return getInExpressionCountLimit();
}
/**
* Must LOB values occur last in inserts and updates?
*
@ -4031,41 +4043,44 @@ public abstract class Dialect implements ConversionContext, TypeContributor, Fun
return null;
}
- protected final BatchLoadSizingStrategy STANDARD_DEFAULT_BATCH_LOAD_SIZING_STRATEGY =
- (numberOfKeyColumns, numberOfKeys, inClauseParameterPaddingEnabled) -> {
- final int paddedSize;
- if ( inClauseParameterPaddingEnabled ) {
- paddedSize = MathHelper.ceilingPowerOfTwo( numberOfKeys );
- }
- else {
- paddedSize = numberOfKeys;
- }
- // For tuples, there is no limit, so we can just use the power of two padding approach
- if ( numberOfKeyColumns > 1 ) {
- return paddedSize;
- }
- final int inExpressionCountLimit = getInExpressionCountLimit();
- if ( inExpressionCountLimit > 0 ) {
- if ( paddedSize < inExpressionCountLimit ) {
- return paddedSize;
- }
- else if ( numberOfKeys < inExpressionCountLimit ) {
- return numberOfKeys;
- }
- return getInExpressionCountLimit();
- }
- return paddedSize;
- };
/**
* The strategy used to determine the appropriate number of keys
* to load in a single SQL query with multi-key loading.
* @see org.hibernate.Session#byMultipleIds
* @see org.hibernate.Session#byMultipleNaturalId
*/
public MultiKeyLoadSizingStrategy getMultiKeyLoadSizingStrategy() {
return STANDARD_MULTI_KEY_LOAD_SIZING_STRATEGY;
}
/**
- * The strategy to use for determining batch sizes in batch loading.
* The strategy used to determine the appropriate number of keys
* to load in a single SQL query with batch-fetch loading.
*
* @implNote By default, the same as {@linkplain #getMultiKeyLoadSizingStrategy}
*
* @see org.hibernate.annotations.BatchSize
*/
- public BatchLoadSizingStrategy getDefaultBatchLoadSizingStrategy() {
- return STANDARD_DEFAULT_BATCH_LOAD_SIZING_STRATEGY;
public MultiKeyLoadSizingStrategy getBatchLoadSizingStrategy() {
return getMultiKeyLoadSizingStrategy();
}
protected final MultiKeyLoadSizingStrategy STANDARD_MULTI_KEY_LOAD_SIZING_STRATEGY = (numberOfColumns, numberOfKeys, pad) -> {
numberOfKeys = pad ? MathHelper.ceilingPowerOfTwo( numberOfKeys ) : numberOfKeys;
final long parameterCount = (long) numberOfColumns * numberOfKeys;
final int limit = getParameterCountLimit();
if ( limit > 0 ) {
// the Dialect reported a limit - see if the parameter count exceeds the limit
if ( parameterCount >= limit ) {
return limit / numberOfColumns;
}
}
return numberOfKeys;
};
/**
* Is JDBC statement warning logging enabled by default?
*
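To make the new sizing arithmetic concrete, here is a small self-contained sketch that mirrors `STANDARD_MULTI_KEY_LOAD_SIZING_STRATEGY` above; the class and method names are illustrative:
[source,java]
----
import org.hibernate.internal.util.MathHelper;

public class MultiKeyLoadSizingExample {
	// mirrors the lambda above: pad to a power of two, then honor the parameter-count limit
	static int optimalBatchSize(int numberOfColumns, int numberOfKeys, boolean pad, int parameterCountLimit) {
		numberOfKeys = pad ? MathHelper.ceilingPowerOfTwo( numberOfKeys ) : numberOfKeys;
		final long parameterCount = (long) numberOfColumns * numberOfKeys;
		if ( parameterCountLimit > 0 && parameterCount >= parameterCountLimit ) {
			return parameterCountLimit / numberOfColumns;
		}
		return numberOfKeys;
	}

	public static void main(String[] args) {
		// 1000 single-column keys padded to 1024; a 2048-parameter limit leaves that untouched
		System.out.println( optimalBatchSize( 1, 1000, true, 2048 ) );  // 1024
		// 1000 keys over a 3-column composite key would need 3000 parameters,
		// so the batch is capped at 2048 / 3 = 682 keys
		System.out.println( optimalBatchSize( 3, 1000, false, 2048 ) ); // 682
	}
}
----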

View File

@ -47,7 +47,7 @@ import org.hibernate.engine.jdbc.env.spi.SchemaNameResolver;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.exception.spi.SQLExceptionConversionDelegate;
import org.hibernate.exception.spi.ViolatedConstraintNameExtractor;
- import org.hibernate.loader.BatchLoadSizingStrategy;
import org.hibernate.loader.ast.spi.MultiKeyLoadSizingStrategy;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.ForeignKey;
@ -90,7 +90,6 @@ import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
- import jakarta.persistence.GenerationType;
import jakarta.persistence.TemporalType;
/**
@ -1161,6 +1160,11 @@ public class DialectDelegateWrapper extends Dialect {
return wrapped.getInExpressionCountLimit();
}
@Override
public int getParameterCountLimit() {
return wrapped.getParameterCountLimit();
}
@Override
public boolean forceLobAsLastValue() {
return wrapped.forceLobAsLastValue();
@ -1247,8 +1251,13 @@ public class DialectDelegateWrapper extends Dialect {
}
@Override
- public BatchLoadSizingStrategy getDefaultBatchLoadSizingStrategy() {
- return wrapped.getDefaultBatchLoadSizingStrategy();
public MultiKeyLoadSizingStrategy getBatchLoadSizingStrategy() {
return wrapped.getBatchLoadSizingStrategy();
}
@Override
public MultiKeyLoadSizingStrategy getMultiKeyLoadSizingStrategy() {
return wrapped.getMultiKeyLoadSizingStrategy();
}
@Override

View File

@ -25,6 +25,7 @@ import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
@ -57,6 +58,15 @@ public class HSQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
}
}
@Override
public void visitInArrayPredicate(InArrayPredicate inArrayPredicate) {
// column in ( unnest(?) )
inArrayPredicate.getTestExpression().accept( this );
appendSql( " in (unnest(" );
inArrayPredicate.getArrayParameter().accept( this );
appendSql( "))" );
}
@Override
protected boolean supportsArrayConstructor() {
return true;
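Usage sketch for the HSQLDB path above: a multi-id load binds the whole id batch as one SQL ARRAY parameter, and `visitInArrayPredicate` renders the restriction as `id in (unnest(?))`. The entity here is assumed, not part of this commit:
[source,java]
----
import java.util.List;
import org.hibernate.Session;

public class HsqlMultiLoadExample {
	// Person is assumed to be a mapped @Entity with a Long identifier
	static List<Person> loadMany(Session session) {
		// on HSQLDB the id restriction is rendered as: ... where p.id in (unnest(?))
		return session.byMultipleIds( Person.class )
				.multiLoad( 1L, 2L, 3L, 4L );
	}
}
----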

View File

@ -18,6 +18,7 @@ import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.LikePredicate;
import org.hibernate.sql.ast.tree.predicate.NullnessPredicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
@ -38,6 +39,19 @@ public class PostgreSQLSqlAstTranslator<T extends JdbcOperation> extends SqlAstT
super( sessionFactory, statement );
}
@Override
public void visitInArrayPredicate(InArrayPredicate inArrayPredicate) {
inArrayPredicate.getTestExpression().accept( this );
appendSql( " = any (" );
inArrayPredicate.getArrayParameter().accept( this );
appendSql( ")" );
}
@Override
protected String getArrayContainsFunction() {
return super.getArrayContainsFunction();
}
@Override
protected void renderInsertIntoNoColumns(TableInsertStandard tableInsert) {
renderIntoIntoAndTable( tableInsert );
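The PostgreSQL translator takes the other common route for array-based key restrictions, rendering `column = any (?)` instead of `in (unnest(?))`. An illustrative mapping (entity names assumed) whose lazy collection would be batch-fetched through this path when the dialect selects the ARRAY strategy:
[source,java]
----
import java.util.Set;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.OneToMany;
import org.hibernate.annotations.BatchSize;

@Entity
public class Author {
	@Id
	Long id;

	// Touching one Author#books collection batch-fetches up to 32 collection keys;
	// on PostgreSQL the key restriction becomes: ... where books.author_id = any (?)
	// with the keys bound as a single ARRAY parameter.
	@OneToMany(mappedBy = "author")
	@BatchSize(size = 32)
	Set<Book> books;   // Book is an assumed entity
}
----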

View File

@ -118,7 +118,13 @@ import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithM
*/
public class SQLServerDialect extends AbstractTransactSQLDialect {
private final static DatabaseVersion MINIMUM_VERSION = DatabaseVersion.make( 10, 0 );
- private static final int PARAM_LIST_SIZE_LIMIT = 2100;
/**
* NOTE : 2100 is the documented limit, but in testing, sending 2100
* parameters fails with an error saying the count must be less than 2100.
*/
private static final int PARAM_LIST_SIZE_LIMIT = 2048;
// See microsoft.sql.Types.GEOMETRY
private static final int GEOMETRY_TYPE_CODE = -157;
// See microsoft.sql.Types.GEOGRAPHY

View File

@ -74,7 +74,10 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
private static final DatabaseVersion MINIMUM_VERSION = DatabaseVersion.make( 16, 0 );
//All Sybase dialects share an IN list size limit.
- private static final int PARAM_LIST_SIZE_LIMIT = 250000;
private static final int IN_LIST_SIZE_LIMIT = 250000;
private static final int PARAM_COUNT_LIMIT = 2000;
private final UniqueDelegate uniqueDelegate = new SkipNullableUniqueDelegate(this);
public SybaseDialect() {
@ -163,7 +166,12 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
@Override
public int getInExpressionCountLimit() {
- return PARAM_LIST_SIZE_LIMIT;
return IN_LIST_SIZE_LIMIT;
}
@Override
public int getParameterCountLimit() {
return PARAM_COUNT_LIMIT;
}
@Override
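Worked numbers for the Sybase split above, which is what HHH-16509 is about: the IN-list element limit (250000) and the statement parameter limit (2000) are different things, and the sizing strategy in `Dialect` is governed by the stricter parameter count:
[source,java]
----
public class SybaseLimitExample {
	public static void main(String[] args) {
		// single-column key: 2000 parameters allow 2000 keys per batch,
		// far below the 250000-element IN-list cap
		System.out.println( 2000 / 1 );  // 2000
		// 3-column composite key: each key costs three parameters,
		// so a batch is capped at 2000 / 3 keys
		System.out.println( 2000 / 3 );  // 666
	}
}
----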

View File

@ -80,6 +80,17 @@ public class BatchFetchQueueHelper {
batchFetchQueue.removeBatchLoadableEntityKey( entityKey );
}
/**
* Remove the given {@code entityKey} from the batch loadable entities
* {@link BatchFetchQueue} of the given session.
*/
public static void removeBatchLoadableEntityKey(
EntityKey entityKey,
SharedSessionContractImplementor session) {
final BatchFetchQueue batchFetchQueue = session.getPersistenceContextInternal().getBatchFetchQueue();
batchFetchQueue.removeBatchLoadableEntityKey( entityKey );
}
public static void removeBatchLoadableEntityKey(
Object id,
EntityMappingType entityMappingType,

View File

@ -0,0 +1,21 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.engine.profile.internal;
import org.hibernate.engine.profile.Fetch;
/**
* Commonality between entities and collections as something that can be affected by fetch profiles.
*
* @author Steve Ebersole
*/
public interface FetchProfileAffectee {
/**
* Register the profile name with the entity/collection
*/
void registerAffectingFetchProfile(String fetchProfileName, Fetch.Style fetchStyle);
}
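A minimal sketch of what an implementor of this new SPI could look like; the real implementors in this commit are the entity and collection persisters, so this class is purely illustrative:
[source,java]
----
import java.util.HashSet;
import java.util.Set;
import org.hibernate.engine.profile.Fetch;
import org.hibernate.engine.profile.internal.FetchProfileAffectee;

public class ExampleFetchProfileAffectee implements FetchProfileAffectee {
	private final Set<String> affectingProfiles = new HashSet<>();

	@Override
	public void registerAffectingFetchProfile(String fetchProfileName, Fetch.Style fetchStyle) {
		// remember the profile so later load-plan building can ask "am I affected?"
		affectingProfiles.add( fetchProfileName );
	}

	public boolean isAffectedBy(String fetchProfileName) {
		return affectingProfiles.contains( fetchProfileName );
	}
}
----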

View File

@ -16,8 +16,11 @@ import org.hibernate.cache.spi.access.EntityDataAccess;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.internal.CacheHelper;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.util.IndexedConsumer;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
@ -183,6 +186,61 @@ public class BatchFetchQueue {
return false;
}
/**
* A "collector" form of {@link #getBatchLoadableEntityIds}. Useful
* in cases where we want a specially created array/container - allows
* creation of concretely typed array for ARRAY param binding to ensure
* the driver does not need to cast/copy the values array.
*/
public <T> void collectBatchLoadableEntityIds(
final int domainBatchSize,
IndexedConsumer<T> collector,
final T loadingId,
final EntityMappingType entityDescriptor) {
// make sure we load the id being loaded in the batch!
collector.accept( 0, loadingId );
if ( batchLoadableEntityKeys == null ) {
return;
}
final LinkedHashSet<EntityKey> set = batchLoadableEntityKeys.get( entityDescriptor.getEntityName() );
if ( set == null ) {
return;
}
final EntityIdentifierMapping identifierMapping = entityDescriptor.getIdentifierMapping();
int batchPosition = 1;
int end = -1;
boolean checkForEnd = false;
for ( EntityKey key : set ) {
if ( checkForEnd && batchPosition == end ) {
// the first id found after the given id
return;
}
if ( identifierMapping.areEqual( loadingId, key.getIdentifier(), context.getSession() ) ) {
end = batchPosition;
}
else {
if ( !isCached( key, entityDescriptor.getEntityPersister() ) ) {
//noinspection unchecked
collector.accept( batchPosition++, (T) key.getIdentifier() );
}
}
if ( batchPosition == domainBatchSize ) {
// end of array, start filling again from start
batchPosition = 1;
if ( end != -1 ) {
checkForEnd = true;
}
}
}
}
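Calling pattern for the collector form above, as the ARRAY-parameter loaders later in this commit use it; the identifier type and the surrounding variables are assumptions for the sketch:
[source,java]
----
import org.hibernate.engine.spi.BatchFetchQueue;
import org.hibernate.metamodel.mapping.EntityMappingType;

public class CollectIdsExample {
	// collect up to batchSize uncached ids into a concretely typed array,
	// ready to be bound as a single SQL ARRAY parameter
	static Integer[] collectIds(
			BatchFetchQueue batchFetchQueue,
			EntityMappingType entityDescriptor,
			Integer idBeingLoaded,
			int batchSize) {
		final Integer[] idsToLoad = new Integer[batchSize];
		batchFetchQueue.collectBatchLoadableEntityIds(
				batchSize,
				(index, id) -> idsToLoad[index] = id,
				idBeingLoaded,
				entityDescriptor
		);
		return idsToLoad;
	}
}
----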
/**
* Get a batch of unloaded identifiers for this class, using a slightly
* complex algorithm that tries to grab keys registered immediately after
@ -292,6 +350,88 @@ public class BatchFetchQueue {
}
}
/**
* A "collector" form of {@link #getCollectionBatch}. Useful
* in cases where we want a specially created array/container - allows
* creation of concretely typed array for ARRAY param binding to ensure
* the driver does not need to cast/copy the values array.
*/
public <T> void collectBatchLoadableCollectionKeys(
int batchSize,
IndexedConsumer<T> collector,
T keyBeingLoaded,
PluralAttributeMapping pluralAttributeMapping) {
collector.accept( 0, keyBeingLoaded );
if ( batchLoadableCollections == null ) {
return;
}
int i = 1;
int end = -1;
boolean checkForEnd = false;
final LinkedHashMap<CollectionEntry, PersistentCollection<?>> map = batchLoadableCollections.get( pluralAttributeMapping.getNavigableRole().getFullPath() );
if ( map == null ) {
return;
}
for ( Entry<CollectionEntry, PersistentCollection<?>> me : map.entrySet() ) {
final CollectionEntry ce = me.getKey();
final PersistentCollection<?> collection = me.getValue();
if ( ce.getLoadedKey() == null ) {
// the loadedKey of the collectionEntry might be null as it might have been reset to null
// (see for example Collections.processDereferencedCollection()
// and CollectionEntry.afterAction())
// though we clear the queue on flush, it seems like a good idea to guard
// against potentially null loadedKeys (which leads to various NPEs as demonstrated in HHH-7821).
continue;
}
if ( collection.wasInitialized() ) {
// should never happen
LOG.warn( "Encountered initialized collection in BatchFetchQueue, this should not happen." );
continue;
}
if ( checkForEnd && i == end ) {
// the first key found after the given key
return;
}
final boolean isEqual = pluralAttributeMapping.getKeyDescriptor().areEqual(
keyBeingLoaded,
ce.getLoadedKey(),
context.getSession()
);
// final boolean isEqual = collectionPersister.getKeyType().isEqual(
// id,
// ce.getLoadedKey(),
// collectionPersister.getFactory()
// );
if ( isEqual ) {
end = i;
}
else if ( !isCached( ce.getLoadedKey(), pluralAttributeMapping.getCollectionDescriptor() ) ) {
//noinspection unchecked
collector.accept( i++, (T) ce.getLoadedKey() );
}
if ( i == batchSize ) {
//end of array, start filling again from start
i = 1;
if ( end != -1 ) {
checkForEnd = true;
}
}
}
//we ran out of keys to try
}
/**
* Get a batch of uninitialized collection keys for a given role
*

View File

@ -11,16 +11,18 @@ import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.engine.profile.Association;
import org.hibernate.engine.profile.Fetch;
import org.hibernate.engine.profile.FetchProfile;
import org.hibernate.engine.profile.internal.FetchProfileAffectee;
import org.hibernate.metamodel.MappingMetamodel;
import org.hibernate.metamodel.mapping.EntityValuedModelPart;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.persister.entity.EntityPersister;
- import org.hibernate.persister.entity.Loadable;
- import org.hibernate.type.Type;
import java.util.HashMap;
import java.util.Map;
/**
- * Creates {@link FetchProfile}s.
* Create {@link FetchProfile} references from {@link org.hibernate.mapping.FetchProfile} references
*
* @author Gavin King
*/
@ -40,28 +42,53 @@ public class FetchProfileHelper {
private static FetchProfile createFetchProfile(
MappingMetamodel mappingMetamodel,
org.hibernate.mapping.FetchProfile mappingProfile) {
- final FetchProfile fetchProfile = new FetchProfile( mappingProfile.getName() );
final String profileName = mappingProfile.getName();
final FetchProfile fetchProfile = new FetchProfile( profileName );
for ( org.hibernate.mapping.FetchProfile.Fetch mappingFetch : mappingProfile.getFetches() ) {
// resolve the persister owning the fetch
final EntityPersister owner = getEntityPersister( mappingMetamodel, fetchProfile, mappingFetch );
( (FetchProfileAffectee) owner ).registerAffectingFetchProfile( profileName, null );
- // validate the specified association fetch
- final Type associationType = owner.getPropertyType( mappingFetch.getAssociation() );
- if ( associationType == null || !associationType.isAssociationType() ) {
- throw new HibernateException( "Fetch profile [" + fetchProfile.getName()
- + "] specified an association that does not exist [" + mappingFetch.getAssociation() + "]" );
- }
- // resolve the style
- // then construct the fetch instance...
- fetchProfile.addFetch( new Association( owner, mappingFetch.getAssociation() ), fetchStyle );
- ((Loadable) owner).registerAffectingFetchProfile( fetchProfile.getName() );
final Association association = new Association( owner, mappingFetch.getAssociation() );
final Fetch.Style fetchStyle = Fetch.Style.parse( mappingFetch.getStyle() );
// validate the specified association fetch
final ModelPart fetchablePart = owner.findByPath( association.getAssociationPath() );
validateFetchablePart( fetchablePart, profileName, association );
if ( fetchablePart instanceof FetchProfileAffectee ) {
( (FetchProfileAffectee) fetchablePart ).registerAffectingFetchProfile( profileName, fetchStyle );
}
// then register the association with the FetchProfile
fetchProfile.addFetch( association, fetchStyle );
}
return fetchProfile;
}
private static void validateFetchablePart(ModelPart fetchablePart, String profileName, Association association) {
if ( fetchablePart == null ) {
throw new HibernateException( String.format(
"Fetch profile [%s] specified an association that does not exist - %s",
profileName,
association.getRole()
) );
}
if ( !isAssociation( fetchablePart ) ) {
throw new HibernateException( String.format(
"Fetch profile [%s] specified an association that is not an association - %s",
profileName,
association.getRole()
) );
}
}
private static boolean isAssociation(ModelPart fetchablePart) {
return fetchablePart instanceof EntityValuedModelPart
|| fetchablePart instanceof PluralAttributeMapping;
}
private static EntityPersister getEntityPersister(
MappingMetamodel mappingMetamodel,
FetchProfile fetchProfile,
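For reference, the kind of mapping the new validation above guards; the entity and association names are made up. If `association` did not resolve to an entity-valued or plural attribute, `validateFetchablePart` would throw the `HibernateException` shown earlier:
[source,java]
----
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.ManyToOne;
import org.hibernate.annotations.FetchMode;
import org.hibernate.annotations.FetchProfile;

@Entity
@FetchProfile(
		name = "with-publisher",
		fetchOverrides = @FetchProfile.FetchOverride(
				entity = Book.class,
				association = "publisher",  // must resolve via owner.findByPath(...)
				mode = FetchMode.JOIN
		)
)
public class Book {
	@Id
	Long id;

	@ManyToOne
	Publisher publisher;   // Publisher is an assumed entity
}
----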

View File

@ -13,6 +13,9 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.function.Consumer;
import org.hibernate.HibernateException;
@ -286,33 +289,66 @@ public final class ArrayHelper {
public static final Type[] EMPTY_TYPE_ARRAY = {};
public static final byte[] EMPTY_BYTE_ARRAY = {};
- // public static int[] getBatchSizes(int maxBatchSize) {
- // int batchSize = maxBatchSize;
- // int n = 1;
- // while ( batchSize > 1 ) {
- // batchSize = getNextBatchSize( batchSize );
- // n++;
- // }
- // int[] result = new int[n];
- // batchSize = maxBatchSize;
- // for ( int i = 0; i < n; i++ ) {
- // result[i] = batchSize;
- // batchSize = getNextBatchSize( batchSize );
- // }
- // return result;
- // }
- //
- // private static int getNextBatchSize(int batchSize) {
- // if ( batchSize <= 10 ) {
- // return batchSize - 1; //allow 9,8,7,6,5,4,3,2,1
- // }
- // else if ( batchSize / 2 < 10 ) {
- // return 10;
- // }
- // else {
- // return batchSize / 2;
- // }
- // }
/**
* Calculate the batch partitions needed to handle the {@code mappedBatchSize}.
*
* @param mappedBatchSize The {@link org.hibernate.annotations.BatchSize batch-size}. Internally
* this is capped at {@code 256}
*
* @implNote The max batch size is capped at {@code 256}
*
* @return The upper bound for the partitions
*/
public static int[] calculateBatchPartitions(int mappedBatchSize) {
final SortedSet<Integer> partitionSizes = new TreeSet<>( Integer::compareTo );
int batchSize = Math.min( mappedBatchSize, 256 );
while ( batchSize > 1 ) {
partitionSizes.add( batchSize );
batchSize = calculateNextBatchPartitionLimit( batchSize );
}
return ArrayHelper.toIntArray( partitionSizes );
}
private static int calculateNextBatchPartitionLimit(int batchSize) {
if ( batchSize <= 10 ) {
return batchSize - 1; //allow 9,8,7,6,5,4,3,2,1
}
else if ( batchSize / 2 < 10 ) {
return 10;
}
else {
return batchSize / 2;
}
}
public static int[] getBatchSizes(int maxBatchSize) {
int batchSize = maxBatchSize;
int n = 1;
while ( batchSize > 1 ) {
batchSize = getNextBatchSize( batchSize );
n++;
}
int[] result = new int[n];
batchSize = maxBatchSize;
for ( int i = 0; i < n; i++ ) {
result[i] = batchSize;
batchSize = getNextBatchSize( batchSize );
}
return result;
}
private static int getNextBatchSize(int batchSize) {
if ( batchSize <= 10 ) {
return batchSize - 1; //allow 9,8,7,6,5,4,3,2,1
}
else if ( batchSize / 2 < 10 ) {
return 10;
}
else {
return batchSize / 2;
}
}
private static final int SEED = 23;
private static final int PRIME_NUMBER = 37;
@ -438,4 +474,9 @@ public final class ArrayHelper {
consumer.accept( array[ i ] );
}
}
@SuppressWarnings("unchecked")
public static <T> T[] newInstance(Class<T> elementType, int length) {
return (T[]) Array.newInstance( elementType, length );
}
}
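A quick trace of the new partition calculation, using an illustrative mapped batch size of 100; the expected output follows directly from the code above:
[source,java]
----
import java.util.Arrays;
import org.hibernate.internal.util.collections.ArrayHelper;

public class BatchPartitionExample {
	public static void main(String[] args) {
		// halves down to 10 (100, 50, 25, 12, 10), then steps down by one (9..2);
		// sorted ascending: [2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 25, 50, 100]
		System.out.println( Arrays.toString( ArrayHelper.calculateBatchPartitions( 100 ) ) );
	}
}
----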

View File

@ -7,6 +7,9 @@
package org.hibernate.loader;
import java.util.Locale;
import org.hibernate.loader.ast.spi.MultiKeyLoadSizingStrategy;
import org.jboss.logging.Logger;
/**
@ -16,7 +19,7 @@ import org.jboss.logging.Logger;
*
* @author Steve Ebersole
*
- * @deprecated see {@link BatchLoadSizingStrategy} instead
* @deprecated see {@link MultiKeyLoadSizingStrategy} instead
*/
@Deprecated(since = "6.0")
public enum BatchFetchStyle {

View File

@ -1,17 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.loader;
/**
* Strategy (pluggable) for determining an optimal size for batch loads.
*
* @author Steve Ebersole
*/
@FunctionalInterface
public interface BatchLoadSizingStrategy {
int determineOptimalBatchLoadSize(int numberOfKeyColumns, int numberOfKeys, boolean inClauseParameterPaddingEnabled);
}

View File

@ -0,0 +1,96 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.CollectionEntry;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.CollectionBatchLoader;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.sql.results.internal.ResultsHelper;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
/**
* @author Steve Ebersole
*/
public abstract class AbstractCollectionBatchLoader implements CollectionBatchLoader {
private final int domainBatchSize;
private final PluralAttributeMapping attributeMapping;
private final LoadQueryInfluencers influencers;
private final SessionFactoryImplementor sessionFactory;
private final int keyJdbcCount;
public AbstractCollectionBatchLoader(
int domainBatchSize,
LoadQueryInfluencers influencers,
PluralAttributeMapping attributeMapping,
SessionFactoryImplementor sessionFactory) {
this.domainBatchSize = domainBatchSize;
this.attributeMapping = attributeMapping;
this.keyJdbcCount = attributeMapping.getJdbcTypeCount();
this.sessionFactory = sessionFactory;
this.influencers = influencers;
}
@Override
public int getDomainBatchSize() {
return domainBatchSize;
}
@Override
public PluralAttributeMapping getLoadable() {
return attributeMapping;
}
public LoadQueryInfluencers getInfluencers() {
return influencers;
}
public SessionFactoryImplementor getSessionFactory() {
return sessionFactory;
}
public int getKeyJdbcCount() {
return keyJdbcCount;
}
protected void finishInitializingKey(
Object key,
SharedSessionContractImplementor session) {
if ( key == null ) {
return;
}
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf( "Finishing initializing batch-fetched collection : %s.%s", attributeMapping.getNavigableRole().getFullPath(), key );
}
final PersistenceContext persistenceContext = session.getPersistenceContext();
final CollectionKey collectionKey = new CollectionKey( getLoadable().getCollectionDescriptor(), key );
final PersistentCollection<?> collection = persistenceContext.getCollection( collectionKey );
if ( !collection.wasInitialized() ) {
final CollectionEntry entry = persistenceContext.getCollectionEntry( collection );
collection.initializeEmptyCollection( entry.getLoadedPersister() );
ResultsHelper.finalizeCollectionLoading(
persistenceContext,
entry.getLoadedPersister(),
collection,
collectionKey,
true
);
}
}
}

View File

@ -0,0 +1,71 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.spi.EventSource;
import org.hibernate.loader.ast.spi.MultiIdEntityLoader;
import org.hibernate.loader.ast.spi.MultiIdLoadOptions;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import java.util.List;
/**
* Base support for MultiIdEntityLoader implementations
*
* @author Steve Ebersole
*/
public abstract class AbstractMultiIdEntityLoader<T> implements MultiIdEntityLoader<T>, Preparable {
private final EntityMappingType entityDescriptor;
private final SessionFactoryImplementor sessionFactory;
private EntityIdentifierMapping identifierMapping;
public AbstractMultiIdEntityLoader(EntityMappingType entityDescriptor, SessionFactoryImplementor sessionFactory) {
this.entityDescriptor = entityDescriptor;
this.sessionFactory = sessionFactory;
}
@Override
public void prepare() {
identifierMapping = getLoadable().getIdentifierMapping();
}
protected EntityMappingType getEntityDescriptor() {
return entityDescriptor;
}
protected SessionFactoryImplementor getSessionFactory() {
return sessionFactory;
}
public EntityIdentifierMapping getIdentifierMapping() {
return identifierMapping;
}
@Override
public EntityMappingType getLoadable() {
return getEntityDescriptor();
}
@Override
public final <K> List<T> load(K[] ids, MultiIdLoadOptions loadOptions, EventSource session) {
assert ids != null;
if ( loadOptions.isOrderReturnEnabled() ) {
return performOrderedMultiLoad( ids, loadOptions, session );
}
else {
return performUnorderedMultiLoad( ids, loadOptions, session );
}
}
protected abstract <K> List<T> performOrderedMultiLoad(K[] ids, MultiIdLoadOptions loadOptions, EventSource session);
protected abstract <K> List<T> performUnorderedMultiLoad(K[] ids, MultiIdLoadOptions loadOptions, EventSource session);
}
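The `load(...)` template above branches on `MultiIdLoadOptions#isOrderReturnEnabled`; from user code that flag is driven through `MultiIdentifierLoadAccess`. A brief sketch, with the entity assumed:
[source,java]
----
import java.util.List;
import org.hibernate.Session;

public class MultiIdLoadOrderingExample {
	// Person is assumed to be a mapped @Entity with Long ids
	static List<Person> loadUnordered(Session session) {
		return session.byMultipleIds( Person.class )
				// false -> performUnorderedMultiLoad(): results need not be
				// re-sorted into the order of the ids passed in
				.enableOrderedReturn( false )
				.multiLoad( 1L, 2L, 3L );
	}
}
----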

View File

@ -0,0 +1,35 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import java.util.Map;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.loader.ast.spi.BatchLoaderFactory;
import org.hibernate.service.spi.ServiceRegistryImplementor;
/**
* Initiator for {@link StandardBatchLoaderFactory}
*
* @author Steve Ebersole
*/
public class BatchLoaderFactoryInitiator implements StandardServiceInitiator<BatchLoaderFactory> {
/**
* Singleton access
*/
public static final BatchLoaderFactoryInitiator INSTANCE = new BatchLoaderFactoryInitiator();
@Override
public BatchLoaderFactory initiateService(Map<String, Object> configurationValues, ServiceRegistryImplementor registry) {
return new StandardBatchLoaderFactory( configurationValues, registry );
}
@Override
public Class<BatchLoaderFactory> getServiceInitiated() {
return BatchLoaderFactory.class;
}
}

View File

@ -104,6 +104,32 @@ public class CacheEntityLoaderHelper {
return new PersistenceContextEntry( old, EntityStatus.MANAGED );
}
/**
* Attempts to locate the entity in the session-level cache.
* <p>
* If allowed to return nulls, then if the entity happens to be found in
* the session cache, we check the entity type for proper handling
* of entity hierarchies.
* <p>
* If checkDeleted was set to true, then if the entity is found in the
* session-level cache, its current status within the session cache
* is checked to see if it has previously been scheduled for deletion.
*
* @param event The load event
* @param keyToLoad The EntityKey representing the entity to be loaded.
* @param options The load options.
*
* @return The entity from the session-level cache, or null.
*
* @throws HibernateException Generally indicates problems applying a lock-mode.
*/
public static PersistenceContextEntry loadFromSessionCacheStatic(
final LoadEvent event,
final EntityKey keyToLoad,
final LoadEventListener.LoadType options) {
return INSTANCE.loadFromSessionCache( event, keyToLoad, options );
}
/**
* Attempts to locate the entity in the session-level cache.
* <p>

View File

@ -0,0 +1,163 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import java.lang.reflect.Array;
import java.util.Collections;
import org.hibernate.LockOptions;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.loader.ast.spi.CollectionBatchLoader;
import org.hibernate.loader.ast.spi.SqlArrayMultiKeyLoader;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.internal.SimpleForeignKeyDescriptor;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterBindingImpl;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.internal.JdbcParameterImpl;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.hibernate.type.BasicType;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
/**
* CollectionBatchLoader using a SQL ARRAY parameter to pass the key values
*
* @author Steve Ebersole
*/
public class CollectionBatchLoaderArrayParam
extends AbstractCollectionBatchLoader
implements CollectionBatchLoader, SqlArrayMultiKeyLoader {
private final Class<?> arrayElementType;
private final JdbcMapping arrayJdbcMapping;
private final JdbcParameter jdbcParameter;
private final SelectStatement sqlSelect;
private final JdbcOperationQuerySelect jdbcSelectOperation;
public CollectionBatchLoaderArrayParam(
int domainBatchSize,
LoadQueryInfluencers loadQueryInfluencers,
PluralAttributeMapping attributeMapping,
SessionFactoryImplementor sessionFactory) {
super( domainBatchSize, loadQueryInfluencers, attributeMapping, sessionFactory );
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Using ARRAY batch fetching strategy for collection `%s` : %s",
attributeMapping.getNavigableRole().getFullPath(),
domainBatchSize
);
}
final SimpleForeignKeyDescriptor keyDescriptor = (SimpleForeignKeyDescriptor) getLoadable().getKeyDescriptor();
arrayElementType = keyDescriptor.getJavaType().getJavaTypeClass();
Class<?> arrayClass = Array.newInstance( arrayElementType, 0 ).getClass();
final BasicType<?> arrayBasicType = getSessionFactory().getTypeConfiguration()
.getBasicTypeRegistry()
.getRegisteredType( arrayClass );
arrayJdbcMapping = MultiKeyLoadHelper.resolveArrayJdbcMapping(
arrayBasicType,
keyDescriptor.getJdbcMapping(),
arrayClass,
getSessionFactory()
);
jdbcParameter = new JdbcParameterImpl( arrayJdbcMapping );
sqlSelect = LoaderSelectBuilder.createSelectBySingleArrayParameter(
getLoadable(),
keyDescriptor.getKeyPart(),
getInfluencers(),
LockOptions.NONE,
jdbcParameter,
getSessionFactory()
);
jdbcSelectOperation = getSessionFactory().getJdbcServices()
.getJdbcEnvironment()
.getSqlAstTranslatorFactory()
.buildSelectTranslator( getSessionFactory(), sqlSelect )
.translate( JdbcParameterBindings.NO_BINDINGS, QueryOptions.NONE );
}
@Override
public PersistentCollection<?> load(Object key, SharedSessionContractImplementor session) {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf( "Batch loading entity `%s#%s`", getLoadable().getNavigableRole().getFullPath(), key );
}
final Object[] keysToInitialize = resolveKeysToInitialize( key, session );
initializeKeys( keysToInitialize, session );
for ( int i = 0; i < keysToInitialize.length; i++ ) {
finishInitializingKey( keysToInitialize[i], session );
}
final CollectionKey collectionKey = new CollectionKey( getLoadable().getCollectionDescriptor(), key );
return session.getPersistenceContext().getCollection( collectionKey );
}
private Object[] resolveKeysToInitialize(Object keyBeingLoaded, SharedSessionContractImplementor session) {
final Object[] keysToInitialize = (Object[]) Array.newInstance( arrayElementType, getDomainBatchSize() );
session.getPersistenceContextInternal().getBatchFetchQueue().collectBatchLoadableCollectionKeys(
getDomainBatchSize(),
(index, value) -> keysToInitialize[index] = value,
keyBeingLoaded,
getLoadable()
);
return keysToInitialize;
}
private void initializeKeys(Object[] keysToInitialize, SharedSessionContractImplementor session) {
assert jdbcSelectOperation != null;
assert jdbcParameter != null;
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl(1);
jdbcParameterBindings.addBinding(
jdbcParameter,
new JdbcParameterBindingImpl( arrayJdbcMapping, keysToInitialize )
);
final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler(
session.getPersistenceContext().getBatchFetchQueue(),
sqlSelect,
Collections.singletonList( jdbcParameter ),
jdbcParameterBindings
);
session.getJdbcServices().getJdbcSelectExecutor().list(
jdbcSelectOperation,
jdbcParameterBindings,
new SingleIdExecutionContext(
null,
null,
null,
LockOptions.NONE,
subSelectFetchableKeysHandler,
session
),
RowTransformerStandardImpl.instance(),
ListResultsConsumer.UniqueSemantic.FILTER
);
}
public void prepare() {
}
}

View File

@ -0,0 +1,209 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.loader.ast.internal;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.LockOptions;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.BatchFetchQueue;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.internal.util.MutableInteger;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.loader.ast.spi.CollectionBatchLoader;
import org.hibernate.loader.ast.spi.SqlArrayMultiKeyLoader;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
/**
* CollectionLoader for batch fetching using a SQL IN predicate
*
* @author Steve Ebersole
*/
public class CollectionBatchLoaderInPredicate
extends AbstractCollectionBatchLoader
implements CollectionBatchLoader, SqlArrayMultiKeyLoader {
private final int keyColumnCount;
private final int sqlBatchSize;
private final List<JdbcParameter> jdbcParameters;
private final SelectStatement sqlAst;
private final JdbcOperationQuerySelect jdbcSelect;
private CollectionLoaderSingleKey singleKeyLoader;
public CollectionBatchLoaderInPredicate(
int domainBatchSize,
LoadQueryInfluencers influencers,
PluralAttributeMapping attributeMapping,
SessionFactoryImplementor sessionFactory) {
super( domainBatchSize, influencers, attributeMapping, sessionFactory );
this.keyColumnCount = attributeMapping.getKeyDescriptor().getJdbcTypeCount();
this.sqlBatchSize = sessionFactory.getJdbcServices()
.getDialect()
.getBatchLoadSizingStrategy()
.determineOptimalBatchLoadSize( keyColumnCount, domainBatchSize, false );
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Using IN-predicate batch fetching strategy for collection `%s` : %s (%s)",
attributeMapping.getNavigableRole().getFullPath(),
sqlBatchSize,
domainBatchSize
);
}
this.jdbcParameters = new ArrayList<>();
this.sqlAst = LoaderSelectBuilder.createSelect(
attributeMapping,
null,
attributeMapping.getKeyDescriptor(),
null,
sqlBatchSize,
influencers,
LockOptions.NONE,
jdbcParameters::add,
sessionFactory
);
assert this.jdbcParameters.size() == this.sqlBatchSize * this.keyColumnCount;
this.jdbcSelect = sessionFactory.getJdbcServices()
.getJdbcEnvironment()
.getSqlAstTranslatorFactory()
.buildSelectTranslator( sessionFactory, sqlAst )
.translate( JdbcParameterBindings.NO_BINDINGS, QueryOptions.NONE );
}
@Override
public PersistentCollection<?> load(
Object key,
SharedSessionContractImplementor session) {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf( "Loading collection `%s#%s` by batch-fetch", getLoadable().getNavigableRole().getFullPath(), key );
}
final MutableInteger nonNullCounter = new MutableInteger();
final ArrayList<Object> keysToInitialize = CollectionHelper.arrayList( getDomainBatchSize() );
session.getPersistenceContextInternal().getBatchFetchQueue().collectBatchLoadableCollectionKeys(
getDomainBatchSize(),
(index, batchableKey) -> {
keysToInitialize.add( batchableKey );
if ( batchableKey != null ) {
nonNullCounter.increment();
}
},
key,
getLoadable().asPluralAttributeMapping()
);
if ( nonNullCounter.get() <= 0 ) {
throw new IllegalStateException( "Number of non-null collection keys to batch fetch should never be 0" );
}
if ( nonNullCounter.get() == 1 ) {
prepareSingleKeyLoaderIfNeeded();
return singleKeyLoader.load( key, session );
}
initializeKeys( key, keysToInitialize.toArray( keysToInitialize.toArray( new Object[0] ) ), nonNullCounter.get(), session );
final CollectionKey collectionKey = new CollectionKey( getLoadable().getCollectionDescriptor(), key );
return session.getPersistenceContext().getCollection( collectionKey );
}
private void prepareSingleKeyLoaderIfNeeded() {
if ( singleKeyLoader == null ) {
singleKeyLoader = new CollectionLoaderSingleKey( getLoadable(), getInfluencers(), getSessionFactory() );
}
}
private <T> void initializeKeys(
T key,
T[] keysToInitialize,
int nonNullKeysToInitializeCount,
SharedSessionContractImplementor session) {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Collection keys to batch-fetch initialize (`%s#%s`) %s",
getLoadable().getNavigableRole().getFullPath(),
key,
keysToInitialize
);
}
final MultiKeyLoadChunker<T> chunker = new MultiKeyLoadChunker<>(
sqlBatchSize,
keyColumnCount,
getLoadable().getKeyDescriptor(),
jdbcParameters,
sqlAst,
jdbcSelect
);
final BatchFetchQueue batchFetchQueue = session.getPersistenceContextInternal().getBatchFetchQueue();
chunker.processChunks(
keysToInitialize,
nonNullKeysToInitializeCount,
(jdbcParameterBindings, session1) -> {
// Create a RegistrationHandler for handling any subselect fetches we encounter handling this chunk
final SubselectFetch.RegistrationHandler registrationHandler = SubselectFetch.createRegistrationHandler(
batchFetchQueue,
sqlAst,
jdbcParameters,
jdbcParameterBindings
);
return new ExecutionContextWithSubselectFetchHandler( session, registrationHandler );
},
(key1, relativePosition, absolutePosition) -> {
},
(startIndex) -> {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Processing collection batch-fetch chunk (`%s#%s`) %s - %s",
getLoadable().getNavigableRole().getFullPath(),
key,
startIndex,
startIndex + (sqlBatchSize-1)
);
}
},
(startIndex, nonNullElementCount) -> {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Finishing collection batch-fetch chunk (`%s#%s`) %s - %s (%s)",
getLoadable().getNavigableRole().getFullPath(),
key,
startIndex,
startIndex + (sqlBatchSize-1),
nonNullElementCount
);
}
for ( int i = 0; i < nonNullElementCount; i++ ) {
final int keyPosition = i + startIndex;
if ( keyPosition < keysToInitialize.length ) {
final T keyToInitialize = keysToInitialize[keyPosition];
finishInitializingKey( keyToInitialize, session );
}
}
},
session
);
}
}

View File

@ -1,235 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.loader.ast.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hibernate.LockOptions;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.CollectionEntry;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.loader.ast.spi.CollectionLoader;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.internal.ResultsHelper;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.jboss.logging.Logger;
/**
* A one-time use CollectionLoader for applying a batch fetch
*
* @author Steve Ebersole
*/
public class CollectionLoaderBatchKey implements CollectionLoader {
private static final Logger log = Logger.getLogger( CollectionLoaderBatchKey.class );
private final PluralAttributeMapping attributeMapping;
private final int batchSize;
private final int keyJdbcCount;
private SelectStatement batchSizeSqlAst;
private List<JdbcParameter> batchSizeJdbcParameters;
public CollectionLoaderBatchKey(
PluralAttributeMapping attributeMapping,
int batchSize,
LoadQueryInfluencers influencers,
SessionFactoryImplementor sessionFactory) {
this.attributeMapping = attributeMapping;
this.batchSize = batchSize;
this.keyJdbcCount = attributeMapping.getKeyDescriptor().getJdbcTypeCount();
this.batchSizeJdbcParameters = new ArrayList<>();
this.batchSizeSqlAst = LoaderSelectBuilder.createSelect(
attributeMapping,
null,
attributeMapping.getKeyDescriptor(),
null,
batchSize,
influencers,
LockOptions.NONE,
batchSizeJdbcParameters::add,
sessionFactory
);
}
@Override
public PluralAttributeMapping getLoadable() {
return attributeMapping;
}
@Override
public PersistentCollection<?> load(
Object key,
SharedSessionContractImplementor session) {
final Object[] batchIds = session.getPersistenceContextInternal()
.getBatchFetchQueue()
.getCollectionBatch( getLoadable().getCollectionDescriptor(), key, batchSize );
final int numberOfIds = ArrayHelper.countNonNull( batchIds );
if ( numberOfIds == 1 ) {
final List<JdbcParameter> jdbcParameters = new ArrayList<>( keyJdbcCount );
final SelectStatement sqlAst = LoaderSelectBuilder.createSelect(
attributeMapping,
null,
attributeMapping.getKeyDescriptor(),
null,
1,
session.getLoadQueryInfluencers(),
LockOptions.NONE,
jdbcParameters::add,
session.getFactory()
);
new SingleIdLoadPlan(
null,
attributeMapping.getKeyDescriptor(),
sqlAst,
jdbcParameters,
LockOptions.NONE,
session.getFactory()
).load( key, session );
}
else {
batchLoad( batchIds, numberOfIds , session );
}
final CollectionKey collectionKey = new CollectionKey( attributeMapping.getCollectionDescriptor(), key );
return session.getPersistenceContext().getCollection( collectionKey );
}
private void batchLoad(
Object[] batchIds,
int numberOfIds,
SharedSessionContractImplementor session) {
if ( log.isDebugEnabled() ) {
log.debugf(
"Batch loading collection [%s] : %s",
getLoadable().getCollectionDescriptor().getRole(),
batchIds
);
}
int smallBatchStart = 0;
int smallBatchLength = Math.min( numberOfIds, batchSize );
while ( true ) {
final List<JdbcParameter> jdbcParameters;
final SelectStatement sqlAst;
if ( smallBatchLength == batchSize ) {
jdbcParameters = this.batchSizeJdbcParameters;
sqlAst = this.batchSizeSqlAst;
}
else {
jdbcParameters = new ArrayList<>();
sqlAst = LoaderSelectBuilder.createSelect(
getLoadable(),
// null here means to select everything
null,
getLoadable().getKeyDescriptor(),
null,
numberOfIds,
session.getLoadQueryInfluencers(),
LockOptions.NONE,
jdbcParameters::add,
session.getFactory()
);
}
final SessionFactoryImplementor sessionFactory = session.getFactory();
final JdbcServices jdbcServices = sessionFactory.getJdbcServices();
final JdbcEnvironment jdbcEnvironment = jdbcServices.getJdbcEnvironment();
final SqlAstTranslatorFactory sqlAstTranslatorFactory = jdbcEnvironment.getSqlAstTranslatorFactory();
final JdbcOperationQuerySelect jdbcSelect = sqlAstTranslatorFactory
.buildSelectTranslator( sessionFactory, sqlAst )
.translate( null, QueryOptions.NONE );
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl( keyJdbcCount * smallBatchLength );
int offset = 0;
for ( int i = smallBatchStart; i < smallBatchStart + smallBatchLength; i++ ) {
offset += jdbcParameterBindings.registerParametersForEachJdbcValue(
batchIds[i],
offset,
getLoadable().getKeyDescriptor(),
jdbcParameters,
session
);
}
assert offset == jdbcParameters.size();
final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler(
session.getPersistenceContext().getBatchFetchQueue(),
sqlAst,
Collections.emptyList(),
jdbcParameterBindings
);
jdbcServices.getJdbcSelectExecutor().list(
jdbcSelect,
jdbcParameterBindings,
new ExecutionContextWithSubselectFetchHandler( session, subSelectFetchableKeysHandler ),
RowTransformerStandardImpl.instance(),
ListResultsConsumer.UniqueSemantic.FILTER
);
for ( int i = smallBatchStart; i < smallBatchStart + smallBatchLength; i++ ) {
// collections that were not initialized here should be empty
finishLoadingCollection( batchIds[i], session );
}
// prepare for the next round...
smallBatchStart += smallBatchLength;
if ( smallBatchStart >= numberOfIds ) {
break;
}
smallBatchLength = Math.min( numberOfIds - smallBatchStart, batchSize );
}
}
private void finishLoadingCollection(Object key, SharedSessionContractImplementor session) {
final PersistenceContext persistenceContext = session.getPersistenceContext();
final CollectionKey collectionKey = new CollectionKey( attributeMapping.getCollectionDescriptor(), key );
final PersistentCollection<?> collection = persistenceContext.getCollection( collectionKey );
if ( !collection.wasInitialized() ) {
final CollectionEntry entry = persistenceContext.getCollectionEntry( collection );
collection.initializeEmptyCollection( entry.getLoadedPersister() );
ResultsHelper.finalizeCollectionLoading(
persistenceContext,
entry.getLoadedPersister(),
collection,
collectionKey,
true
);
}
}
}
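For readers tracing the `smallBatchStart`/`smallBatchLength` bookkeeping in `batchLoad` above, the following is a minimal, self-contained sketch of the same chunking arithmetic. It uses only plain Java; the `Chunk` record and method names are illustrative stand-ins, not part of the Hibernate API.
[source,java]
----
import java.util.ArrayList;
import java.util.List;

public class BatchChunkingSketch {
	// illustrative stand-in for a (start, length) window over the collected batch ids
	record Chunk(int start, int length) {}

	// split numberOfIds ids into windows of at most batchSize,
	// mirroring the smallBatchStart/smallBatchLength loop above
	static List<Chunk> chunks(int numberOfIds, int batchSize) {
		final List<Chunk> result = new ArrayList<>();
		int start = 0;
		int length = Math.min( numberOfIds, batchSize );
		while ( true ) {
			result.add( new Chunk( start, length ) );
			start += length;
			if ( start >= numberOfIds ) {
				break;
			}
			length = Math.min( numberOfIds - start, batchSize );
		}
		return result;
	}

	public static void main(String[] args) {
		// 7 collected keys with a batch size of 3 ->
		// [Chunk[start=0, length=3], Chunk[start=3, length=3], Chunk[start=6, length=1]]
		System.out.println( chunks( 7, 3 ) );
	}
}
----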

View File

@ -0,0 +1,202 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import java.lang.reflect.Array;
import java.util.Collections;
import java.util.Locale;
import org.hibernate.LockOptions;
import org.hibernate.engine.internal.BatchFetchQueueHelper;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.loader.ast.spi.EntityBatchLoader;
import org.hibernate.loader.ast.spi.SqlArrayMultiKeyLoader;
import org.hibernate.metamodel.mapping.BasicEntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterBindingImpl;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.internal.JdbcParameterImpl;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
/**
* SingleIdEntityLoaderSupport implementation based on using a single
* {@linkplain org.hibernate.type.SqlTypes#ARRAY array} parameter to pass the
* entire batch of ids.
*
* @author Steve Ebersole
*/
public class EntityBatchLoaderArrayParam<T>
extends SingleIdEntityLoaderSupport<T>
implements EntityBatchLoader<T>, SqlArrayMultiKeyLoader, Preparable {
private final int domainBatchSize;
private BasicEntityIdentifierMapping identifierMapping;
private JdbcMapping arrayJdbcMapping;
private JdbcParameter jdbcParameter;
private SelectStatement sqlAst;
private JdbcOperationQuerySelect jdbcSelectOperation;
/**
* Instantiates the loader
*
* @param domainBatchSize The maximum number of keys to initialize in a single batch
*
* @implNote We delay initializing the internal SQL AST state until first use. Creating
* the SQL AST internally relies on the entity's {@link EntityIdentifierMapping}; however, we
* create the static batch-loader for the entity in the persister constructor, and the
* {@link EntityIdentifierMapping} is not available at that time. By the time of first use,
* it is available.
*/
public EntityBatchLoaderArrayParam(
int domainBatchSize,
EntityMappingType entityDescriptor,
SessionFactoryImplementor sessionFactory) {
super( entityDescriptor, sessionFactory );
this.domainBatchSize = domainBatchSize;
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Batch fetching enabled for `%s` (entity) using ARRAY strategy : %s",
entityDescriptor.getEntityName(),
domainBatchSize
);
}
}
@Override
public int getDomainBatchSize() {
return domainBatchSize;
}
@Override
public final T load(
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf( "Batch fetching entity `%s#%s`", getLoadable().getEntityName(), pkValue );
}
final Object[] ids = resolveIdsToInitialize( pkValue, session );
initializeEntities( ids, pkValue, entityInstance, lockOptions, readOnly, session );
final EntityKey entityKey = session.generateEntityKey( pkValue, getLoadable().getEntityPersister() );
//noinspection unchecked
return (T) session.getPersistenceContext().getEntity( entityKey );
}
protected Object[] resolveIdsToInitialize(Object pkValue, SharedSessionContractImplementor session) {
final Object[] idsToLoad = (Object[]) Array.newInstance( identifierMapping.getJavaType().getJavaTypeClass(), domainBatchSize );
session.getPersistenceContextInternal().getBatchFetchQueue().collectBatchLoadableEntityIds(
domainBatchSize,
(index, value) -> idsToLoad[index] = value,
pkValue,
getLoadable()
);
return idsToLoad;
}
private void initializeEntities(
Object[] idsToInitialize,
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
LoaderHelper.loadByArrayParameter(
idsToInitialize,
sqlAst,
jdbcSelectOperation,
jdbcParameter,
arrayJdbcMapping,
pkValue,
entityInstance,
lockOptions,
readOnly,
session
);
//noinspection ForLoopReplaceableByForEach
for ( int i = 0; i < idsToInitialize.length; i++ ) {
final Object id = idsToInitialize[i];
if ( id == null ) {
// skip any of the null padded ids
// - actually we could probably even break here
continue;
}
// found or not, remove the key from the batch-fetch queue
BatchFetchQueueHelper.removeBatchLoadableEntityKey( id, getLoadable(), session );
}
}
@Override
public T load(Object pkValue, LockOptions lockOptions, Boolean readOnly, SharedSessionContractImplementor session) {
return load( pkValue, null, lockOptions, readOnly, session );
}
@Override
public void prepare() {
identifierMapping = (BasicEntityIdentifierMapping) getLoadable().getIdentifierMapping();
final Class<?> arrayClass = Array.newInstance( identifierMapping.getJavaType().getJavaTypeClass(), 0 ).getClass();
final BasicTypeRegistry basicTypeRegistry = sessionFactory.getTypeConfiguration().getBasicTypeRegistry();
final BasicType<?> arrayBasicType = basicTypeRegistry.getRegisteredType( arrayClass );
arrayJdbcMapping = MultiKeyLoadHelper.resolveArrayJdbcMapping(
arrayBasicType,
identifierMapping.getJdbcMapping(),
arrayClass,
sessionFactory
);
jdbcParameter = new JdbcParameterImpl( arrayJdbcMapping );
sqlAst = LoaderSelectBuilder.createSelectBySingleArrayParameter(
getLoadable(),
identifierMapping,
LoadQueryInfluencers.NONE,
LockOptions.NONE,
jdbcParameter,
sessionFactory
);
jdbcSelectOperation = sessionFactory.getJdbcServices()
.getJdbcEnvironment()
.getSqlAstTranslatorFactory()
.buildSelectTranslator( sessionFactory, sqlAst )
.translate( JdbcParameterBindings.NO_BINDINGS, QueryOptions.NONE );
}
@Override
public String toString() {
return String.format(
Locale.ROOT,
"EntityBatchLoaderArrayParam(%s [%s])",
getLoadable().getEntityName(),
domainBatchSize
);
}
}
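With the ARRAY strategy above, the number of JDBC parameters no longer grows with the batch size. As a rough, hedged illustration of the same idea at the plain JDBC level, the sketch below binds one SQL array and restricts on it; the `person` table, its columns, and the PostgreSQL-style `= any(?)` restriction are assumptions for illustration only, and the SQL Hibernate actually renders depends on the Dialect and the resolved array JdbcType.
[source,java]
----
import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ArrayParamSketch {
	public static void loadBatch(Connection connection, Long[] ids) throws SQLException {
		// a single parameter, regardless of how many ids are in the batch;
		// null "padding" slots in the array simply match no rows
		final String sql = "select id, name from person where id = any(?)";
		try ( PreparedStatement statement = connection.prepareStatement( sql ) ) {
			final Array idArray = connection.createArrayOf( "bigint", ids );
			statement.setArray( 1, idArray );
			try ( ResultSet resultSet = statement.executeQuery() ) {
				while ( resultSet.next() ) {
					System.out.println( resultSet.getLong( 1 ) + " -> " + resultSet.getString( 2 ) );
				}
			}
		}
	}
}
----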

View File

@ -0,0 +1,337 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.loader.ast.internal;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.BatchFetchQueue;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.loader.ast.spi.EntityBatchLoader;
import org.hibernate.loader.ast.spi.SqlInPredicateMultiKeyLoader;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import static org.hibernate.internal.util.collections.CollectionHelper.arrayList;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
/**
* An EntityBatchLoader using one or more SQL queries, each of which initializes up to
* {@linkplain #getSqlBatchSize()} entities using a SQL IN predicate restriction -
* e.g., {@code ... where id in (?,?,...)}.
* <p/>
* The number of parameters rendered into the SQL is controlled by {@linkplain #getSqlBatchSize()}.
* Any unused parameter slots for a particular execution are set to {@code null}.
*
* @author Steve Ebersole
*/
public class EntityBatchLoaderInPredicate<T>
extends SingleIdEntityLoaderSupport<T>
implements EntityBatchLoader<T>, SqlInPredicateMultiKeyLoader, Preparable {
private final int domainBatchSize;
private final int sqlBatchSize;
private List<JdbcParameter> jdbcParameters;
private SelectStatement sqlAst;
private JdbcOperationQuerySelect jdbcSelectOperation;
/**
* @param domainBatchSize The maximum number of entities we will initialize for each {@link #load load}
* @param sqlBatchSize The number of keys our SQL AST should be able to fetch
*/
public EntityBatchLoaderInPredicate(
int domainBatchSize,
int sqlBatchSize,
EntityMappingType entityDescriptor,
SessionFactoryImplementor sessionFactory) {
super( entityDescriptor, sessionFactory );
this.domainBatchSize = domainBatchSize;
this.sqlBatchSize = sqlBatchSize;
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Batch fetching `%s` entity using padded IN-list : %s (%s)",
entityDescriptor.getEntityName(),
domainBatchSize,
sqlBatchSize
);
}
}
@Override
public int getDomainBatchSize() {
return domainBatchSize;
}
public int getSqlBatchSize() {
return sqlBatchSize;
}
@Override
public final T load(Object pkValue, LockOptions lockOptions, Boolean readOnly, SharedSessionContractImplementor session) {
return load( pkValue, null, lockOptions, readOnly, session );
}
@Override
public final T load(
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf( "Batch loading entity `%s#%s`", getLoadable().getEntityName(), pkValue );
}
final Object[] idsToInitialize = resolveIdsToLoad( pkValue, session );
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf( "Ids to batch-fetch initialize (`%s#%s`) %s", getLoadable().getEntityName(), pkValue, Arrays.toString(idsToInitialize) );
}
initializeEntities( idsToInitialize, pkValue, entityInstance, lockOptions, readOnly, session );
final EntityKey entityKey = session.generateEntityKey( pkValue, getLoadable().getEntityPersister() );
//noinspection unchecked
return (T) session.getPersistenceContext().getEntity( entityKey );
}
protected Object[] resolveIdsToLoad(Object pkValue, SharedSessionContractImplementor session) {
return session.getPersistenceContextInternal().getBatchFetchQueue().getBatchLoadableEntityIds(
getLoadable(),
pkValue,
domainBatchSize
);
}
protected void initializeEntities(
Object[] idsToInitialize,
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
final MultiKeyLoadChunker<Object> chunker = new MultiKeyLoadChunker<>(
sqlBatchSize,
getLoadable().getIdentifierMapping().getJdbcTypeCount(),
getLoadable().getIdentifierMapping(),
jdbcParameters,
sqlAst,
jdbcSelectOperation
);
final BatchFetchQueue batchFetchQueue = session.getPersistenceContextInternal().getBatchFetchQueue();
final List<EntityKey> entityKeys = arrayList( sqlBatchSize );
chunker.processChunks(
idsToInitialize,
sqlBatchSize,
(jdbcParameterBindings, session1) -> {
// Create a RegistrationHandler for handling any subselect fetches we encounter handling this chunk
final SubselectFetch.RegistrationHandler registrationHandler = SubselectFetch.createRegistrationHandler(
batchFetchQueue,
sqlAst,
jdbcParameters,
jdbcParameterBindings
);
return new SingleIdExecutionContext(
pkValue,
entityInstance,
readOnly,
lockOptions,
registrationHandler,
session
);
},
(key, relativePosition, absolutePosition) -> {
if ( key != null ) {
entityKeys.add( session.generateEntityKey( key, getLoadable().getEntityPersister() ) );
}
},
(startIndex) -> {
if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
MULTI_KEY_LOAD_LOGGER.debugf(
"Processing entity batch-fetch chunk (`%s#%s`) %s - %s",
getLoadable().getEntityName(),
pkValue,
startIndex,
startIndex + ( sqlBatchSize - 1 )
);
}
},
(startIndex, nonNullElementCount) -> {
entityKeys.forEach( batchFetchQueue::removeBatchLoadableEntityKey );
entityKeys.clear();
},
session
);
// int numberOfIdsLeft = idsToInitialize.length;
// int start = 0;
// while ( numberOfIdsLeft > 0 ) {
// if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
// MULTI_KEY_LOAD_LOGGER.debugf( "Processing batch-fetch chunk (`%s#%s`) %s - %s", getLoadable().getEntityName(), pkValue, start, start + ( sqlBatchSize -1) );
// }
// initializeChunk( idsToInitialize, start, pkValue, entityInstance, lockOptions, readOnly, session );
//
// start += sqlBatchSize;
// numberOfIdsLeft -= sqlBatchSize;
// }
}
private void initializeChunk(
Object[] idsToInitialize,
int start,
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
initializeChunk(
idsToInitialize,
getLoadable(),
start,
sqlBatchSize,
jdbcParameters,
sqlAst,
jdbcSelectOperation,
pkValue,
entityInstance,
lockOptions,
readOnly,
session
);
}
private static void initializeChunk(
Object[] idsToInitialize,
EntityMappingType entityMapping,
int startIndex,
int numberOfKeys,
List<JdbcParameter> jdbcParameters,
SelectStatement sqlAst,
JdbcOperationQuerySelect jdbcSelectOperation,
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
final BatchFetchQueue batchFetchQueue = session.getPersistenceContext().getBatchFetchQueue();
final int numberOfJdbcParameters = entityMapping.getIdentifierMapping().getJdbcTypeCount() * numberOfKeys;
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl( numberOfJdbcParameters );
final List<EntityKey> entityKeys = arrayList( numberOfKeys );
int bindCount = 0;
for ( int i = 0; i < numberOfKeys; i++ ) {
final int idPosition = i + startIndex;
final Object value;
if ( idPosition >= idsToInitialize.length ) {
value = null;
}
else {
value = idsToInitialize[idPosition];
}
if ( value != null ) {
entityKeys.add( session.generateEntityKey( value, entityMapping.getEntityPersister() ) );
}
bindCount += jdbcParameterBindings.registerParametersForEachJdbcValue(
value,
bindCount,
entityMapping.getIdentifierMapping(),
jdbcParameters,
session
);
}
assert bindCount == jdbcParameters.size();
if ( entityKeys.isEmpty() ) {
// there are no non-null keys in the chunk
return;
}
// Create a SubselectFetch.RegistrationHandler for handling any subselect fetches we encounter here
final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler(
batchFetchQueue,
sqlAst,
jdbcParameters,
jdbcParameterBindings
);
session.getJdbcServices().getJdbcSelectExecutor().list(
jdbcSelectOperation,
jdbcParameterBindings,
new SingleIdExecutionContext(
pkValue,
entityInstance,
readOnly,
lockOptions,
subSelectFetchableKeysHandler,
session
),
RowTransformerStandardImpl.instance(),
ListResultsConsumer.UniqueSemantic.FILTER
);
entityKeys.forEach( batchFetchQueue::removeBatchLoadableEntityKey );
}
@Override
public void prepare() {
EntityIdentifierMapping identifierMapping = getLoadable().getIdentifierMapping();
final int expectedNumberOfParameters = identifierMapping.getJdbcTypeCount() * sqlBatchSize;
jdbcParameters = arrayList( expectedNumberOfParameters );
sqlAst = LoaderSelectBuilder.createSelect(
getLoadable(),
// null here means to select everything
null,
identifierMapping,
null,
sqlBatchSize,
LoadQueryInfluencers.NONE,
LockOptions.NONE,
jdbcParameters::add,
sessionFactory
);
assert jdbcParameters.size() == expectedNumberOfParameters;
jdbcSelectOperation = sessionFactory.getJdbcServices()
.getJdbcEnvironment()
.getSqlAstTranslatorFactory()
.buildSelectTranslator( sessionFactory, sqlAst )
.translate( JdbcParameterBindings.NO_BINDINGS, QueryOptions.NONE );
}
@Override
public String toString() {
return String.format(
Locale.ROOT,
"EntityBatchLoaderInPredicate(%s [%s (%s)])",
getLoadable().getEntityName(),
domainBatchSize,
sqlBatchSize
);
}
}
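As a hedged, standalone illustration of the padded IN-list strategy described in the class javadoc, the plain-JDBC sketch below always renders a fixed number of placeholders and binds `null` to the unused slots; the `person` table and the batch size of 5 are assumptions for illustration only.
[source,java]
----
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;

public class PaddedInListSketch {
	private static final int SQL_BATCH_SIZE = 5;
	// rendered once, with SQL_BATCH_SIZE placeholders
	private static final String SQL = "select id, name from person where id in (?,?,?,?,?)";

	public static void loadChunk(Connection connection, Long[] chunkIds) throws SQLException {
		try ( PreparedStatement statement = connection.prepareStatement( SQL ) ) {
			for ( int i = 0; i < SQL_BATCH_SIZE; i++ ) {
				if ( i < chunkIds.length && chunkIds[i] != null ) {
					statement.setLong( i + 1, chunkIds[i] );
				}
				else {
					// unused slot - pad with null, which matches no row
					statement.setNull( i + 1, Types.BIGINT );
				}
			}
			try ( ResultSet resultSet = statement.executeQuery() ) {
				while ( resultSet.next() ) {
					System.out.println( resultSet.getLong( 1 ) + " -> " + resultSet.getString( 2 ) );
				}
			}
		}
	}
}
----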

View File

@ -6,23 +6,46 @@
*/ */
package org.hibernate.loader.ast.internal; package org.hibernate.loader.ast.internal;
import java.lang.reflect.Array;
import java.util.Collections;
import java.util.List;
import org.hibernate.LockMode; import org.hibernate.LockMode;
import org.hibernate.LockOptions; import org.hibernate.LockOptions;
import org.hibernate.ObjectDeletedException; import org.hibernate.ObjectDeletedException;
import org.hibernate.cache.spi.access.EntityDataAccess; import org.hibernate.cache.spi.access.EntityDataAccess;
import org.hibernate.cache.spi.access.SoftLock; import org.hibernate.cache.spi.access.SoftLock;
import org.hibernate.engine.spi.EntityEntry; import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status; import org.hibernate.engine.spi.Status;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.event.spi.EventSource; import org.hibernate.event.spi.EventSource;
import org.hibernate.loader.LoaderLogging; import org.hibernate.loader.LoaderLogging;
import org.hibernate.metamodel.mapping.BasicValuedModelPart;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterBindingImpl;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.hibernate.type.descriptor.java.JavaType;
/** /**
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public class LoaderHelper { public class LoaderHelper {
/**
* Ensure the {@linkplain LockMode} associated with the entity in relation to a
* persistence context is {@linkplain LockMode#greaterThan greater than or equal} to the
* requested mode.
*/
public static void upgradeLock(Object object, EntityEntry entry, LockOptions lockOptions, EventSource session) { public static void upgradeLock(Object object, EntityEntry entry, LockOptions lockOptions, EventSource session) {
LockMode requestedLockMode = lockOptions.getLockMode(); LockMode requestedLockMode = lockOptions.getLockMode();
if ( requestedLockMode.greaterThan( entry.getLockMode() ) ) { if ( requestedLockMode.greaterThan( entry.getLockMode() ) ) {
@ -80,4 +103,125 @@ public class LoaderHelper {
} }
} }
/**
* Determine if the influencers associated with the given Session indicate read-only
*/
public static Boolean getReadOnlyFromLoadQueryInfluencers(SharedSessionContractImplementor session) {
return getReadOnlyFromLoadQueryInfluencers( session.getLoadQueryInfluencers() );
}
/**
* Determine if given influencers indicate read-only
*/
public static Boolean getReadOnlyFromLoadQueryInfluencers(LoadQueryInfluencers loadQueryInfluencers) {
if ( loadQueryInfluencers == null ) {
return null;
}
return loadQueryInfluencers.getReadOnly();
}
/**
* Normalize an array of keys (primary, foreign or natural).
* <p/>
* If the array is already typed as the key type, {@code keys} is simply returned.
* <p/>
* Otherwise, a new typed array is created and the contents copied from {@code keys} to this new array. If
* key {@linkplain org.hibernate.cfg.AvailableSettings#JPA_LOAD_BY_ID_COMPLIANCE coercion} is enabled, the
* values will be coerced to the key type.
*
* @param keys The keys to normalize
* @param keyPart The ModelPart describing the key
*
* @param <K> The key type
*/
public static <K> K[] normalizeKeys(
K[] keys,
BasicValuedModelPart keyPart,
SharedSessionContractImplementor session,
SessionFactoryImplementor sessionFactory) {
assert keys.getClass().isArray();
//noinspection unchecked
final JavaType<K> keyJavaType = (JavaType<K>) keyPart.getJavaType();
final Class<K> keyClass = keyJavaType.getJavaTypeClass();
if ( keys.getClass().getComponentType().equals( keyClass ) ) {
return keys;
}
final K[] typedArray = createTypedArray( keyClass, keys.length );
final boolean coerce = !sessionFactory.getJpaMetamodel().getJpaCompliance().isLoadByIdComplianceEnabled();
if ( !coerce ) {
System.arraycopy( keys, 0, typedArray, 0, keys.length );
}
else {
for ( int i = 0; i < keys.length; i++ ) {
typedArray[i] = keyJavaType.coerce( keys[i], session );
}
}
return typedArray;
}
/**
* Creates a typed array, as opposed to a generic {@code Object[]} that holds the typed values
*
* @param elementClass The type of the array elements. See {@link Class#getComponentType()}
* @param length The length of the array to create. This is usually zero for Hibernate's uses
*/
public static <X> X[] createTypedArray(Class<X> elementClass, @SuppressWarnings("SameParameterValue") int length) {
//noinspection unchecked
return (X[]) Array.newInstance( elementClass, length );
}
/**
* Load one or more instances of a model part (an entity or collection)
* based on a SQL ARRAY parameter to specify the keys (as opposed to the
* more traditional SQL IN predicate approach).
*
* @param <R> The type of the model part to load
* @param <K> The type of the keys
*/
public static <R,K> List<R> loadByArrayParameter(
K[] idsToInitialize,
SelectStatement sqlAst,
JdbcOperationQuerySelect jdbcOperation,
JdbcParameter jdbcParameter,
JdbcMapping arrayJdbcMapping,
Object entityId,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
assert jdbcOperation != null;
assert jdbcParameter != null;
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl( 1 );
jdbcParameterBindings.addBinding(
jdbcParameter,
new JdbcParameterBindingImpl( arrayJdbcMapping, idsToInitialize )
);
final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler(
session.getPersistenceContext().getBatchFetchQueue(),
sqlAst,
Collections.singletonList( jdbcParameter ),
jdbcParameterBindings
);
return session.getJdbcServices().getJdbcSelectExecutor().list(
jdbcOperation,
jdbcParameterBindings,
new SingleIdExecutionContext(
entityId,
entityInstance,
readOnly,
lockOptions,
subSelectFetchableKeysHandler,
session
),
RowTransformerStandardImpl.instance(),
ListResultsConsumer.UniqueSemantic.FILTER
);
}
} }
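To make the `normalizeKeys` contract above concrete, here is a small standalone sketch of the same idea using only `java.lang.reflect.Array`: if the incoming array is not already of the key type, copy the keys into a correctly typed array. The coercion branch is deliberately simplified to a cast; in Hibernate it is the `JavaType#coerce` call guarded by the JPA load-by-id compliance setting.
[source,java]
----
import java.lang.reflect.Array;

public class NormalizeKeysSketch {
	@SuppressWarnings("unchecked")
	static <K> K[] normalize(Object[] keys, Class<K> keyClass) {
		if ( keys.getClass().getComponentType().equals( keyClass ) ) {
			// already a K[] - nothing to do
			return (K[]) keys;
		}
		final K[] typed = (K[]) Array.newInstance( keyClass, keys.length );
		for ( int i = 0; i < keys.length; i++ ) {
			// simplified stand-in for the coercion step (e.g. Integer -> Long)
			typed[i] = keyClass.cast( keys[i] );
		}
		return typed;
	}

	public static void main(String[] args) {
		final Object[] raw = { 1L, 2L, 3L };
		final Long[] typed = normalize( raw, Long.class );
		System.out.println( typed.length + " keys of type " + typed.getClass().getComponentType().getSimpleName() );
	}
}
----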

View File

@ -40,6 +40,7 @@ import org.hibernate.metamodel.mapping.NaturalIdMapping;
import org.hibernate.metamodel.mapping.NonAggregatedIdentifierMapping; import org.hibernate.metamodel.mapping.NonAggregatedIdentifierMapping;
import org.hibernate.metamodel.mapping.PluralAttributeMapping; import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.Restrictable; import org.hibernate.metamodel.mapping.Restrictable;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping; import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping;
import org.hibernate.metamodel.mapping.internal.SimpleForeignKeyDescriptor; import org.hibernate.metamodel.mapping.internal.SimpleForeignKeyDescriptor;
import org.hibernate.metamodel.mapping.internal.ToOneAttributeMapping; import org.hibernate.metamodel.mapping.internal.ToOneAttributeMapping;
@ -65,6 +66,7 @@ import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableGroupJoinProducer; import org.hibernate.sql.ast.tree.from.TableGroupJoinProducer;
import org.hibernate.sql.ast.tree.from.TableReference; import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate; import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.InListPredicate; import org.hibernate.sql.ast.tree.predicate.InListPredicate;
import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate; import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate;
import org.hibernate.sql.ast.tree.select.QueryPart; import org.hibernate.sql.ast.tree.select.QueryPart;
@ -99,13 +101,12 @@ public class LoaderSelectBuilder {
private static final Logger log = Logger.getLogger( LoaderSelectBuilder.class ); private static final Logger log = Logger.getLogger( LoaderSelectBuilder.class );
/** /**
* Create an SQL AST select-statement for a select by unique key based on matching one-or-more keys * Create an SQL AST select-statement for loading by unique key
* *
* @param loadable The root Loadable * @param loadable The root Loadable
* @param partsToSelect Parts of the Loadable to select. Null/empty indicates to select the Loadable itself * @param partsToSelect Parts of the Loadable to select. Null/empty indicates to select the Loadable itself
* @param restrictedPart Part to base the where-clause restriction on * @param restrictedPart Part to base the where-clause restriction on
* @param cachedDomainResult DomainResult to be used. Null indicates to generate the DomainResult * @param cachedDomainResult DomainResult to be used. Null indicates to generate the DomainResult
* @param numberOfKeysToLoad How many keys should be accounted for in the where-clause restriction?
* @param loadQueryInfluencers Any influencers (entity graph, fetch profile) to account for * @param loadQueryInfluencers Any influencers (entity graph, fetch profile) to account for
* @param lockOptions Pessimistic lock options to apply * @param lockOptions Pessimistic lock options to apply
* @param jdbcParameterConsumer Consumer for all JdbcParameter references created * @param jdbcParameterConsumer Consumer for all JdbcParameter references created
@ -116,7 +117,6 @@ public class LoaderSelectBuilder {
List<? extends ModelPart> partsToSelect, List<? extends ModelPart> partsToSelect,
ModelPart restrictedPart, ModelPart restrictedPart,
DomainResult<?> cachedDomainResult, DomainResult<?> cachedDomainResult,
int numberOfKeysToLoad,
LoadQueryInfluencers loadQueryInfluencers, LoadQueryInfluencers loadQueryInfluencers,
LockOptions lockOptions, LockOptions lockOptions,
Consumer<JdbcParameter> jdbcParameterConsumer, Consumer<JdbcParameter> jdbcParameterConsumer,
@ -127,7 +127,7 @@ public class LoaderSelectBuilder {
partsToSelect, partsToSelect,
singletonList( restrictedPart ), singletonList( restrictedPart ),
cachedDomainResult, cachedDomainResult,
numberOfKeysToLoad, 1,
loadQueryInfluencers, loadQueryInfluencers,
lockOptions, lockOptions,
determineGraphTraversalState( loadQueryInfluencers ), determineGraphTraversalState( loadQueryInfluencers ),
@ -138,6 +138,86 @@ public class LoaderSelectBuilder {
return process.generateSelect(); return process.generateSelect();
} }
/**
* Create a select-statement (SQL AST) for loading by multiple keys using a single SQL ARRAY parameter
*/
public static SelectStatement createSelectBySingleArrayParameter(
Loadable loadable,
BasicValuedModelPart restrictedPart,
LoadQueryInfluencers influencers,
LockOptions lockOptions,
JdbcParameter jdbcArrayParameter,
SessionFactoryImplementor sessionFactory) {
final LoaderSelectBuilder builder = new LoaderSelectBuilder(
sessionFactory,
loadable,
null,
singletonList( restrictedPart ),
null,
-1,
influencers,
lockOptions,
determineGraphTraversalState( influencers ),
true,
null
);
final QuerySpec rootQuerySpec = new QuerySpec( true );
final LoaderSqlAstCreationState sqlAstCreationState = builder.createSqlAstCreationState( rootQuerySpec );
final NavigablePath rootNavigablePath = new NavigablePath( loadable.getRootPathName() );
final TableGroup rootTableGroup = builder.buildRootTableGroup( rootNavigablePath, rootQuerySpec, sqlAstCreationState );
final DomainResult<?> domainResult = loadable.createDomainResult(
rootNavigablePath,
rootTableGroup,
null,
sqlAstCreationState
);
final List<DomainResult<?>> domainResults = singletonList( domainResult );
applyArrayParamRestriction(
rootQuerySpec,
rootNavigablePath,
rootTableGroup,
restrictedPart,
jdbcArrayParameter,
sqlAstCreationState
);
if ( loadable instanceof PluralAttributeMapping ) {
final PluralAttributeMapping pluralAttributeMapping = (PluralAttributeMapping) loadable;
builder.applyFiltering( rootQuerySpec, rootTableGroup, pluralAttributeMapping, sqlAstCreationState );
builder.applyOrdering( rootQuerySpec, rootTableGroup, pluralAttributeMapping, sqlAstCreationState );
}
else {
builder.applyFiltering( rootQuerySpec, rootTableGroup, (Restrictable) loadable, sqlAstCreationState );
}
return new SelectStatement( rootQuerySpec, domainResults );
}
private static void applyArrayParamRestriction(
QuerySpec rootQuerySpec,
NavigablePath rootNavigablePath,
TableGroup rootTableGroup,
BasicValuedModelPart restrictedPart,
JdbcParameter jdbcArrayParameter,
LoaderSqlAstCreationState sqlAstCreationState) {
final SqlExpressionResolver sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver();
final SelectableMapping restrictedPartMapping = restrictedPart.getSelectable( 0 );
final NavigablePath restrictionPath = rootNavigablePath.append( restrictedPart.getNavigableRole().getNavigableName() );
final TableReference tableReference = rootTableGroup.resolveTableReference( restrictionPath, restrictedPartMapping.getContainingTableExpression() );
final ColumnReference columnRef = (ColumnReference) sqlExpressionResolver.resolveSqlExpression(
tableReference,
restrictedPartMapping
);
rootQuerySpec.applyPredicate( new InArrayPredicate( columnRef, jdbcArrayParameter ) );
}
/** /**
* Create an SQL AST select-statement based on matching one-or-more keys * Create an SQL AST select-statement based on matching one-or-more keys
* *
@ -372,90 +452,24 @@ public class LoaderSelectBuilder {
final NavigablePath rootNavigablePath = new NavigablePath( loadable.getRootPathName() ); final NavigablePath rootNavigablePath = new NavigablePath( loadable.getRootPathName() );
final QuerySpec rootQuerySpec = new QuerySpec( true ); final QuerySpec rootQuerySpec = new QuerySpec( true );
final LoaderSqlAstCreationState sqlAstCreationState = createSqlAstCreationState( rootQuerySpec );
final TableGroup rootTableGroup = buildRootTableGroup( rootNavigablePath, rootQuerySpec, sqlAstCreationState );
final List<DomainResult<?>> domainResults; final List<DomainResult<?>> domainResults;
final LoaderSqlAstCreationState sqlAstCreationState = new LoaderSqlAstCreationState(
rootQuerySpec,
new SqlAliasBaseManager(),
new SimpleFromClauseAccessImpl(),
lockOptions,
this::visitFetches,
forceIdentifierSelection,
loadQueryInfluencers,
creationContext
);
final TableGroup rootTableGroup = loadable.createRootTableGroup(
true,
rootNavigablePath,
null,
null,
() -> rootQuerySpec::applyPredicate,
sqlAstCreationState
);
rootQuerySpec.getFromClause().addRoot( rootTableGroup );
sqlAstCreationState.getFromClauseAccess().registerTableGroup( rootNavigablePath, rootTableGroup );
registerPluralTableGroupParts( sqlAstCreationState.getFromClauseAccess(), rootTableGroup );
if ( partsToSelect != null && !partsToSelect.isEmpty() ) { if ( partsToSelect != null && !partsToSelect.isEmpty() ) {
domainResults = new ArrayList<>( partsToSelect.size() ); domainResults = buildRequestedDomainResults( rootNavigablePath, sqlAstCreationState, rootTableGroup );
for ( ModelPart part : partsToSelect ) { }
final NavigablePath navigablePath = rootNavigablePath.append( part.getPartName() ); else if ( this.cachedDomainResult != null ) {
final TableGroup tableGroup; domainResults = singletonList( this.cachedDomainResult );
if ( part instanceof TableGroupJoinProducer ) {
final TableGroupJoinProducer tableGroupJoinProducer = (TableGroupJoinProducer) part;
final TableGroupJoin tableGroupJoin = tableGroupJoinProducer.createTableGroupJoin(
navigablePath,
rootTableGroup,
null,
null,
SqlAstJoinType.LEFT,
true,
false,
sqlAstCreationState
);
rootTableGroup.addTableGroupJoin( tableGroupJoin );
tableGroup = tableGroupJoin.getJoinedGroup();
sqlAstCreationState.getFromClauseAccess().registerTableGroup( navigablePath, tableGroup );
registerPluralTableGroupParts( sqlAstCreationState.getFromClauseAccess(), tableGroup );
}
else {
tableGroup = rootTableGroup;
}
domainResults.add(
part.createDomainResult(
navigablePath,
tableGroup,
null,
sqlAstCreationState
)
);
}
} }
else { else {
// use the one passed to the constructor or create one (maybe always create and pass?) final DomainResult<?> domainResult = loadable.createDomainResult(
// allows re-use as they can be re-used to save on memory - they rootNavigablePath,
// do not share state between rootTableGroup,
null,
//noinspection rawtypes sqlAstCreationState
final DomainResult domainResult; );
if ( this.cachedDomainResult != null ) {
// used the one passed to the constructor
domainResult = this.cachedDomainResult;
}
else {
// create one
domainResult = loadable.createDomainResult(
rootNavigablePath,
rootTableGroup,
null,
sqlAstCreationState
);
}
//noinspection unchecked
domainResults = singletonList( domainResult ); domainResults = singletonList( domainResult );
} }
@ -485,19 +499,87 @@ public class LoaderSelectBuilder {
return new SelectStatement( rootQuerySpec, domainResults ); return new SelectStatement( rootQuerySpec, domainResults );
} }
private List<DomainResult<?>> buildRequestedDomainResults(NavigablePath rootNavigablePath, LoaderSqlAstCreationState sqlAstCreationState, TableGroup rootTableGroup) {
final List<DomainResult<?>> domainResults;
domainResults = new ArrayList<>( partsToSelect.size() );
for ( ModelPart part : partsToSelect ) {
final NavigablePath navigablePath = rootNavigablePath.append( part.getPartName() );
final TableGroup tableGroup;
if ( part instanceof TableGroupJoinProducer ) {
final TableGroupJoinProducer tableGroupJoinProducer = (TableGroupJoinProducer) part;
final TableGroupJoin tableGroupJoin = tableGroupJoinProducer.createTableGroupJoin(
navigablePath,
rootTableGroup,
null,
null,
SqlAstJoinType.LEFT,
true,
false,
sqlAstCreationState
);
rootTableGroup.addTableGroupJoin( tableGroupJoin );
tableGroup = tableGroupJoin.getJoinedGroup();
sqlAstCreationState.getFromClauseAccess().registerTableGroup( navigablePath, tableGroup );
registerPluralTableGroupParts( sqlAstCreationState.getFromClauseAccess(), tableGroup );
}
else {
tableGroup = rootTableGroup;
}
domainResults.add(
part.createDomainResult(
navigablePath,
tableGroup,
null,
sqlAstCreationState
)
);
}
return domainResults;
}
private TableGroup buildRootTableGroup(NavigablePath rootNavigablePath, QuerySpec rootQuerySpec, LoaderSqlAstCreationState sqlAstCreationState) {
final TableGroup rootTableGroup = loadable.createRootTableGroup(
true,
rootNavigablePath,
null,
null,
() -> rootQuerySpec::applyPredicate,
sqlAstCreationState
);
rootQuerySpec.getFromClause().addRoot( rootTableGroup );
sqlAstCreationState.getFromClauseAccess().registerTableGroup( rootNavigablePath, rootTableGroup );
registerPluralTableGroupParts( sqlAstCreationState.getFromClauseAccess(), rootTableGroup );
return rootTableGroup;
}
private LoaderSqlAstCreationState createSqlAstCreationState(QuerySpec rootQuerySpec) {
final LoaderSqlAstCreationState sqlAstCreationState = new LoaderSqlAstCreationState(
rootQuerySpec,
new SqlAliasBaseManager(),
new SimpleFromClauseAccessImpl(),
lockOptions,
this::visitFetches,
forceIdentifierSelection,
loadQueryInfluencers,
creationContext
);
return sqlAstCreationState;
}
private void applyRestriction( private void applyRestriction(
QuerySpec rootQuerySpec, QuerySpec rootQuerySpec,
NavigablePath rootNavigablePath, NavigablePath rootNavigablePath,
TableGroup rootTableGroup, TableGroup rootTableGroup,
ModelPart modelPart, ModelPart restrictedPart,
int numberColumns, int numberColumns,
Consumer<JdbcParameter> jdbcParameterConsumer, Consumer<JdbcParameter> jdbcParameterConsumer,
LoaderSqlAstCreationState sqlAstCreationState) { LoaderSqlAstCreationState sqlAstCreationState) {
final SqlExpressionResolver sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver(); final SqlExpressionResolver sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver();
final NavigablePath navigablePath = rootNavigablePath.append( modelPart.getNavigableRole().getNavigableName() ); final NavigablePath navigablePath = rootNavigablePath.append( restrictedPart.getNavigableRole().getNavigableName() );
if ( numberColumns == 1 ) { if ( numberColumns == 1 ) {
modelPart.forEachSelectable( restrictedPart.forEachSelectable(
(columnIndex, selection) -> { (columnIndex, selection) -> {
final TableReference tableReference = rootTableGroup.resolveTableReference( final TableReference tableReference = rootTableGroup.resolveTableReference(
navigablePath, selection.getContainingTableExpression() ); navigablePath, selection.getContainingTableExpression() );
@ -532,7 +614,7 @@ public class LoaderSelectBuilder {
else { else {
final List<ColumnReference> columnReferences = new ArrayList<>( numberColumns ); final List<ColumnReference> columnReferences = new ArrayList<>( numberColumns );
modelPart.forEachSelectable( restrictedPart.forEachSelectable(
(columnIndex, selection) -> { (columnIndex, selection) -> {
final TableReference tableReference = rootTableGroup.resolveTableReference( navigablePath, selection.getContainingTableExpression() ); final TableReference tableReference = rootTableGroup.resolveTableReference( navigablePath, selection.getContainingTableExpression() );
columnReferences.add( columnReferences.add(
@ -544,7 +626,7 @@ public class LoaderSelectBuilder {
} }
); );
final SqlTuple tuple = new SqlTuple( columnReferences, modelPart ); final SqlTuple tuple = new SqlTuple( columnReferences, restrictedPart );
final InListPredicate predicate = new InListPredicate( tuple ); final InListPredicate predicate = new InListPredicate( tuple );
for ( int i = 0; i < numberOfKeysToLoad; i++ ) { for ( int i = 0; i < numberOfKeysToLoad; i++ ) {
@ -555,7 +637,7 @@ public class LoaderSelectBuilder {
jdbcParameterConsumer.accept( jdbcParameter ); jdbcParameterConsumer.accept( jdbcParameter );
tupleParams.add( jdbcParameter ); tupleParams.add( jdbcParameter );
} }
final SqlTuple paramTuple = new SqlTuple( tupleParams, modelPart ); final SqlTuple paramTuple = new SqlTuple( tupleParams, restrictedPart );
predicate.addExpression( paramTuple ); predicate.addExpression( paramTuple );
} }
@ -974,18 +1056,7 @@ public class LoaderSelectBuilder {
creationContext creationContext
); );
final TableGroup rootTableGroup = loadable.createRootTableGroup( final TableGroup rootTableGroup = buildRootTableGroup( rootNavigablePath, rootQuerySpec, sqlAstCreationState );
true,
rootNavigablePath,
null,
null,
() -> rootQuerySpec::applyPredicate,
sqlAstCreationState
);
rootQuerySpec.getFromClause().addRoot( rootTableGroup );
sqlAstCreationState.getFromClauseAccess().registerTableGroup( rootNavigablePath, rootTableGroup );
registerPluralTableGroupParts( sqlAstCreationState.getFromClauseAccess(), rootTableGroup );
// generate and apply the restriction // generate and apply the restriction
applySubSelectRestriction( applySubSelectRestriction(

View File

@ -0,0 +1,409 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.engine.internal.BatchFetchQueueHelper;
import org.hibernate.engine.spi.BatchFetchQueue;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.LoadEvent;
import org.hibernate.event.spi.LoadEventListener;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.loader.ast.internal.CacheEntityLoaderHelper.PersistenceContextEntry;
import org.hibernate.loader.ast.spi.MultiIdLoadOptions;
import org.hibernate.loader.ast.spi.SqlArrayMultiKeyLoader;
import org.hibernate.metamodel.mapping.BasicEntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterBindingImpl;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.internal.JdbcParameterImpl;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.checkerframework.checker.nullness.qual.NonNull;
import static org.hibernate.internal.util.collections.CollectionHelper.isEmpty;
/**
* @author Steve Ebersole
*/
public class MultiIdEntityLoaderArrayParam<E> extends AbstractMultiIdEntityLoader<E> implements SqlArrayMultiKeyLoader, Preparable {
private JdbcMapping arrayJdbcMapping;
private JdbcParameter jdbcParameter;
public MultiIdEntityLoaderArrayParam(EntityMappingType entityDescriptor, SessionFactoryImplementor sessionFactory) {
super( entityDescriptor, sessionFactory );
}
@Override
public BasicEntityIdentifierMapping getIdentifierMapping() {
return (BasicEntityIdentifierMapping) super.getIdentifierMapping();
}
@Override
protected <K> List<E> performOrderedMultiLoad(K[] ids, MultiIdLoadOptions loadOptions, EventSource session) {
if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef(
"MultiIdEntityLoaderArrayParam#performOrderedMultiLoad - %s",
getLoadable().getEntityName()
);
}
final boolean coerce = !getSessionFactory().getJpaMetamodel().getJpaCompliance().isLoadByIdComplianceEnabled();
final LockOptions lockOptions = (loadOptions.getLockOptions() == null)
? new LockOptions( LockMode.NONE )
: loadOptions.getLockOptions();
final List<Object> result = CollectionHelper.arrayList( ids.length );
List<Object> idsToLoadFromDatabase = null;
List<Integer> idsToLoadFromDatabaseResultIndexes = null;
for ( int i = 0; i < ids.length; i++ ) {
final Object id;
if ( coerce ) {
id = getLoadable().getIdentifierMapping().getJavaType().coerce( ids[ i ], session );
}
else {
id = ids[ i ];
}
final EntityKey entityKey = new EntityKey( id, getLoadable().getEntityPersister() );
if ( loadOptions.isSessionCheckingEnabled() || loadOptions.isSecondLevelCacheCheckingEnabled() ) {
LoadEvent loadEvent = new LoadEvent(
id,
getLoadable().getJavaType().getJavaTypeClass().getName(),
lockOptions,
session,
LoaderHelper.getReadOnlyFromLoadQueryInfluencers(session)
);
Object managedEntity = null;
if ( loadOptions.isSessionCheckingEnabled() ) {
// look for it in the Session first
final PersistenceContextEntry persistenceContextEntry = CacheEntityLoaderHelper.loadFromSessionCacheStatic(
loadEvent,
entityKey,
LoadEventListener.GET
);
managedEntity = persistenceContextEntry.getEntity();
if ( managedEntity != null
&& !loadOptions.isReturnOfDeletedEntitiesEnabled()
&& !persistenceContextEntry.isManaged() ) {
// put a null in the result
result.add( i, null );
continue;
}
}
if ( managedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) {
// look for it in the SessionFactory
managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent,
getLoadable().getEntityPersister(),
entityKey
);
}
if ( managedEntity != null ) {
result.add( i, managedEntity );
continue;
}
}
// if we did not hit any of the continues above, then we need to batch
// load the entity state.
if ( idsToLoadFromDatabase == null ) {
idsToLoadFromDatabase = new ArrayList<>();
idsToLoadFromDatabaseResultIndexes = new ArrayList<>();
}
// hold its place in the result with the EntityKey, we'll come back to it later
result.add( i, entityKey );
idsToLoadFromDatabase.add( id );
idsToLoadFromDatabaseResultIndexes.add( i );
}
if ( idsToLoadFromDatabase == null ) {
// all the given ids were already associated with the Session
//noinspection unchecked
return (List<E>) result;
}
final SelectStatement sqlAst = LoaderSelectBuilder.createSelectBySingleArrayParameter(
getLoadable(),
getIdentifierMapping(),
session.getLoadQueryInfluencers(),
lockOptions,
jdbcParameter,
getSessionFactory()
);
final JdbcOperationQuerySelect jdbcSelectOperation = getSessionFactory().getJdbcServices()
.getJdbcEnvironment()
.getSqlAstTranslatorFactory()
.buildSelectTranslator( getSessionFactory(), sqlAst )
.translate( JdbcParameterBindings.NO_BINDINGS, QueryOptions.NONE );
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl( 1 );
jdbcParameterBindings.addBinding(
jdbcParameter,
new JdbcParameterBindingImpl( arrayJdbcMapping, idsToLoadFromDatabase.toArray( createTypedArray( 0 ) ) )
);
final PersistenceContext persistenceContext = session.getPersistenceContext();
final BatchFetchQueue batchFetchQueue = persistenceContext.getBatchFetchQueue();
final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler(
batchFetchQueue,
sqlAst,
Collections.singletonList( jdbcParameter ),
jdbcParameterBindings
);
session.getJdbcServices().getJdbcSelectExecutor().list(
jdbcSelectOperation,
jdbcParameterBindings,
new ExecutionContextWithSubselectFetchHandler( session, subSelectFetchableKeysHandler ),
RowTransformerStandardImpl.instance(),
ListResultsConsumer.UniqueSemantic.FILTER
);
for ( int i = 0; i < idsToLoadFromDatabaseResultIndexes.size(); i++ ) {
final Integer resultIndex = idsToLoadFromDatabaseResultIndexes.get(i);
// the element value at this position in the result List should be
// the EntityKey for that entity - reuse it
final EntityKey entityKey = (EntityKey) result.get( resultIndex );
BatchFetchQueueHelper.removeBatchLoadableEntityKey( entityKey, session );
Object entity = persistenceContext.getEntity( entityKey );
if ( entity != null && !loadOptions.isReturnOfDeletedEntitiesEnabled() ) {
// make sure it is not DELETED
final EntityEntry entry = persistenceContext.getEntry( entity );
if ( entry.getStatus().isDeletedOrGone() ) {
// the entity is locally deleted, and the options ask that we not return such entities...
entity = null;
}
}
result.set( resultIndex, entity );
}
//noinspection unchecked
return (List<E>) result;
}
@Override
protected <K> List<E> performUnorderedMultiLoad(
K[] ids,
MultiIdLoadOptions loadOptions,
EventSource session) {
if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef(
"MultiIdEntityLoaderArrayParam#performUnorderedMultiLoad - %s",
getLoadable().getEntityName()
);
}
final List<E> result = CollectionHelper.arrayList( ids.length );
final LockOptions lockOptions = (loadOptions.getLockOptions() == null)
? new LockOptions( LockMode.NONE )
: loadOptions.getLockOptions();
//noinspection unchecked
final K[] idsToLoadFromDatabase = processResolvableEntities(
ids,
(index, entityKey, resolvedEntity) -> result.add( (E) resolvedEntity ),
loadOptions,
lockOptions,
session
);
if ( idsToLoadFromDatabase == null ) {
// all the given ids were already associated with the Session
return result;
}
final SelectStatement sqlAst = LoaderSelectBuilder.createSelectBySingleArrayParameter(
getLoadable(),
getIdentifierMapping(),
session.getLoadQueryInfluencers(),
lockOptions,
jdbcParameter,
getSessionFactory()
);
final JdbcOperationQuerySelect jdbcSelectOperation = getSessionFactory().getJdbcServices()
.getJdbcEnvironment()
.getSqlAstTranslatorFactory()
.buildSelectTranslator( getSessionFactory(), sqlAst )
.translate( JdbcParameterBindings.NO_BINDINGS, QueryOptions.NONE );
final List<E> databaseResults = LoaderHelper.loadByArrayParameter(
idsToLoadFromDatabase,
sqlAst,
jdbcSelectOperation,
jdbcParameter,
arrayJdbcMapping,
null,
null,
lockOptions,
session.isDefaultReadOnly(),
session
);
result.addAll( databaseResults );
//noinspection ForLoopReplaceableByForEach
for ( int i = 0; i < idsToLoadFromDatabase.length; i++ ) {
final Object id = idsToLoadFromDatabase[i];
if ( id == null ) {
// skip any of the null padded ids
// - actually we could probably even break here
continue;
}
// found or not, remove the key from the batch-fetch queue
BatchFetchQueueHelper.removeBatchLoadableEntityKey( id, getLoadable(), session );
}
return result;
}
public interface ResolutionConsumer<T> {
void consume(int position, EntityKey entityKey, T resolvedRef);
}
protected final <R,K> K[] processResolvableEntities(
K[] ids,
ResolutionConsumer<R> resolutionConsumer,
@NonNull MultiIdLoadOptions loadOptions,
@NonNull LockOptions lockOptions,
EventSource session) {
if ( !loadOptions.isSessionCheckingEnabled()
&& !loadOptions.isSecondLevelCacheCheckingEnabled() ) {
// we'll load all of them from the database
return ids;
}
final boolean coerce = !getSessionFactory().getJpaMetamodel().getJpaCompliance().isLoadByIdComplianceEnabled();
boolean foundAnyResolvedEntities = false;
List<K> nonResolvedIds = null;
for ( int i = 0; i < ids.length; i++ ) {
final Object id;
if ( coerce ) {
//noinspection unchecked
id = (K) getLoadable().getIdentifierMapping().getJavaType().coerce( ids[i], session );
}
else {
id = ids[i];
}
final EntityKey entityKey = new EntityKey( id, getLoadable().getEntityPersister() );
final LoadEvent loadEvent = new LoadEvent(
id,
getLoadable().getJavaType().getJavaTypeClass().getName(),
lockOptions,
session,
LoaderHelper.getReadOnlyFromLoadQueryInfluencers( session )
);
Object resolvedEntity = null;
// look for it in the Session first
final PersistenceContextEntry persistenceContextEntry = CacheEntityLoaderHelper.loadFromSessionCacheStatic(
loadEvent,
entityKey,
LoadEventListener.GET
);
if ( loadOptions.isSessionCheckingEnabled() ) {
resolvedEntity = persistenceContextEntry.getEntity();
if ( resolvedEntity != null
&& !loadOptions.isReturnOfDeletedEntitiesEnabled()
&& !persistenceContextEntry.isManaged() ) {
foundAnyResolvedEntities = true;
resolutionConsumer.consume( i, entityKey, null );
continue;
}
}
if ( resolvedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) {
resolvedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent,
getLoadable().getEntityPersister(),
entityKey
);
}
if ( resolvedEntity != null ) {
foundAnyResolvedEntities = true;
//noinspection unchecked
resolutionConsumer.consume( i, entityKey, (R) resolvedEntity);
}
else {
if ( nonResolvedIds == null ) {
nonResolvedIds = new ArrayList<>();
}
//noinspection unchecked,CastCanBeRemovedNarrowingVariableType
nonResolvedIds.add( (K) id );
}
}
if ( foundAnyResolvedEntities ) {
if ( isEmpty( nonResolvedIds ) ) {
// all the given ids were already associated with the Session
return null;
}
return nonResolvedIds.toArray( createTypedArray(0) );
}
return ids;
}
private <X> X[] createTypedArray(@SuppressWarnings("SameParameterValue") int length) {
//noinspection unchecked
return (X[]) Array.newInstance( getIdentifierMapping().getJavaType().getJavaTypeClass(), length );
}
@Override
public void prepare() {
super.prepare();
final Class<?> arrayClass = createTypedArray( 0 ).getClass();
final BasicTypeRegistry basicTypeRegistry = getSessionFactory().getTypeConfiguration().getBasicTypeRegistry();
final BasicType<?> arrayBasicType = basicTypeRegistry.getRegisteredType( arrayClass );
arrayJdbcMapping = MultiKeyLoadHelper.resolveArrayJdbcMapping(
arrayBasicType,
getIdentifierMapping().getJdbcMapping(),
arrayClass,
getSessionFactory()
);
jdbcParameter = new JdbcParameterImpl( arrayJdbcMapping );
}
}
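The ordered variant above keeps the result list aligned with the requested id order by parking an `EntityKey` placeholder at each position that still needs a database hit, then swapping the loaded entity back in by index. Below is a standalone, hedged sketch of that bookkeeping; everything in it is a plain-Java stand-in, not Hibernate API.
[source,java]
----
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class OrderedMultiLoadSketch {
	@SuppressWarnings("unchecked")
	public static <K, E> List<E> loadOrdered(
			List<K> ids,
			// stand-in for session / second-level cache hits
			Map<K, E> alreadyResolved,
			// stand-in for the single ARRAY-parameter database query
			Function<List<K>, Map<K, E>> databaseLoader) {
		final List<Object> result = new ArrayList<>( ids.size() );
		final List<K> toLoad = new ArrayList<>();
		final List<Integer> toLoadPositions = new ArrayList<>();

		for ( int i = 0; i < ids.size(); i++ ) {
			final K id = ids.get( i );
			final E resolved = alreadyResolved.get( id );
			if ( resolved != null ) {
				result.add( resolved );
			}
			else {
				// hold the position with the key; come back to it after the database round trip
				result.add( id );
				toLoad.add( id );
				toLoadPositions.add( i );
			}
		}

		if ( !toLoad.isEmpty() ) {
			final Map<K, E> loaded = databaseLoader.apply( toLoad );
			for ( int j = 0; j < toLoad.size(); j++ ) {
				result.set( toLoadPositions.get( j ), loaded.get( toLoad.get( j ) ) );
			}
		}

		return (List<E>) (List<?>) result;
	}
}
----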

View File

@ -19,7 +19,6 @@ import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.EntityEntry; import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey; import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.PersistenceContext; import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
@ -28,10 +27,8 @@ import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.LoadEvent; import org.hibernate.event.spi.LoadEvent;
import org.hibernate.event.spi.LoadEventListener; import org.hibernate.event.spi.LoadEventListener;
import org.hibernate.internal.util.collections.CollectionHelper; import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.loader.ast.spi.MultiIdEntityLoader;
import org.hibernate.loader.ast.spi.MultiIdLoadOptions; import org.hibernate.loader.ast.spi.MultiIdLoadOptions;
import org.hibernate.mapping.PersistentClass; import org.hibernate.mapping.PersistentClass;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.query.spi.QueryOptions; import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.SqlAstTranslatorFactory; import org.hibernate.sql.ast.SqlAstTranslatorFactory;
@ -46,55 +43,37 @@ import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
/** /**
* Standard MultiIdEntityLoader
*
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> { public class MultiIdEntityLoaderStandard<T> extends AbstractMultiIdEntityLoader<T> {
private static final Logger log = Logger.getLogger( MultiIdLoaderStandard.class ); private static final Logger log = Logger.getLogger( MultiIdEntityLoaderStandard.class );
private final EntityPersister entityDescriptor;
private final SessionFactoryImplementor sessionFactory;
private final int idJdbcTypeCount; private final int idJdbcTypeCount;
public MultiIdLoaderStandard( public MultiIdEntityLoaderStandard(
EntityPersister entityDescriptor, EntityPersister entityDescriptor,
PersistentClass bootDescriptor, PersistentClass bootDescriptor,
SessionFactoryImplementor sessionFactory) { SessionFactoryImplementor sessionFactory) {
this.entityDescriptor = entityDescriptor; super( entityDescriptor, sessionFactory );
this.idJdbcTypeCount = bootDescriptor.getIdentifier().getColumnSpan(); this.idJdbcTypeCount = bootDescriptor.getIdentifier().getColumnSpan();
this.sessionFactory = sessionFactory;
assert idJdbcTypeCount > 0; assert idJdbcTypeCount > 0;
} }
@Override @Override
public EntityMappingType getLoadable() { protected List<T> performOrderedMultiLoad(
return entityDescriptor;
}
@Override
public List<T> load(Object[] ids, MultiIdLoadOptions loadOptions, EventSource session) {
assert ids != null;
if ( loadOptions.isOrderReturnEnabled() ) {
return performOrderedMultiLoad( ids, session, loadOptions );
}
else {
return performUnorderedMultiLoad( ids, session, loadOptions );
}
}
private List<T> performOrderedMultiLoad(
Object[] ids, Object[] ids,
EventSource session, MultiIdLoadOptions loadOptions,
MultiIdLoadOptions loadOptions) { EventSource session) {
if ( log.isTraceEnabled() ) { if ( log.isTraceEnabled() ) {
log.tracef( "#performOrderedMultiLoad(`%s`, ..)", entityDescriptor.getEntityName() ); log.tracef( "#performOrderedMultiLoad(`%s`, ..)", getLoadable().getEntityName() );
} }
assert loadOptions.isOrderReturnEnabled(); assert loadOptions.isOrderReturnEnabled();
final JdbcEnvironment jdbcEnvironment = sessionFactory.getJdbcServices().getJdbcEnvironment(); final JdbcEnvironment jdbcEnvironment = getSessionFactory().getJdbcServices().getJdbcEnvironment();
final Dialect dialect = jdbcEnvironment.getDialect(); final Dialect dialect = jdbcEnvironment.getDialect();
final List<Object> result = CollectionHelper.arrayList( ids.length ); final List<Object> result = CollectionHelper.arrayList( ids.length );
@ -108,34 +87,34 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
maxBatchSize = loadOptions.getBatchSize(); maxBatchSize = loadOptions.getBatchSize();
} }
else { else {
maxBatchSize = dialect.getDefaultBatchLoadSizingStrategy().determineOptimalBatchLoadSize( maxBatchSize = dialect.getBatchLoadSizingStrategy().determineOptimalBatchLoadSize(
idJdbcTypeCount, idJdbcTypeCount,
ids.length, ids.length,
sessionFactory.getSessionFactoryOptions().inClauseParameterPaddingEnabled() getSessionFactory().getSessionFactoryOptions().inClauseParameterPaddingEnabled()
); );
} }
final List<Object> idsInBatch = new ArrayList<>(); final List<Object> idsInBatch = new ArrayList<>();
final List<Integer> elementPositionsLoadedByBatch = new ArrayList<>(); final List<Integer> elementPositionsLoadedByBatch = new ArrayList<>();
final boolean coerce = !sessionFactory.getJpaMetamodel().getJpaCompliance().isLoadByIdComplianceEnabled(); final boolean coerce = !getSessionFactory().getJpaMetamodel().getJpaCompliance().isLoadByIdComplianceEnabled();
for ( int i = 0; i < ids.length; i++ ) { for ( int i = 0; i < ids.length; i++ ) {
final Object id; final Object id;
if ( coerce ) { if ( coerce ) {
id = entityDescriptor.getIdentifierMapping().getJavaType().coerce( ids[i], session ); id = getLoadable().getIdentifierMapping().getJavaType().coerce( ids[i], session );
} }
else { else {
id = ids[i]; id = ids[i];
} }
final EntityKey entityKey = new EntityKey( id, entityDescriptor ); final EntityKey entityKey = new EntityKey( id, getLoadable().getEntityPersister() );
if ( loadOptions.isSessionCheckingEnabled() || loadOptions.isSecondLevelCacheCheckingEnabled() ) { if ( loadOptions.isSessionCheckingEnabled() || loadOptions.isSecondLevelCacheCheckingEnabled() ) {
LoadEvent loadEvent = new LoadEvent( LoadEvent loadEvent = new LoadEvent(
id, id,
entityDescriptor.getMappedClass().getName(), getLoadable().getJavaType().getJavaTypeClass().getName(),
lockOptions, lockOptions,
session, session,
getReadOnlyFromLoadQueryInfluencers(session) LoaderHelper.getReadOnlyFromLoadQueryInfluencers(session)
); );
Object managedEntity = null; Object managedEntity = null;
@ -163,7 +142,7 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
// look for it in the SessionFactory // look for it in the SessionFactory
managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache( managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent, loadEvent,
entityDescriptor, getLoadable().getEntityPersister(),
entityKey entityKey
); );
} }
@ -184,8 +163,7 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
idsInBatch.clear(); idsInBatch.clear();
} }
// Save the EntityKey instance for use later! // Save the EntityKey instance for use later
// todo (6.0) : see below wrt why `elementPositionsLoadedByBatch` probably isn't needed
result.add( i, entityKey ); result.add( i, entityKey );
elementPositionsLoadedByBatch.add( i ); elementPositionsLoadedByBatch.add( i );
} }
@ -196,8 +174,7 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
loadEntitiesById( idsInBatch, lockOptions, session ); loadEntitiesById( idsInBatch, lockOptions, session );
} }
// todo (6.0) : can't we just walk all elements of the results looking for EntityKey and replacing here? // for each result where we set the EntityKey earlier, replace them
// can't imagine
final PersistenceContext persistenceContext = session.getPersistenceContextInternal(); final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
for ( Integer position : elementPositionsLoadedByBatch ) { for ( Integer position : elementPositionsLoadedByBatch ) {
// the element value at this position in the result List should be // the element value at this position in the result List should be
@ -232,7 +209,7 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
} }
if ( log.isTraceEnabled() ) { if ( log.isTraceEnabled() ) {
log.tracef( "#loadEntitiesById(`%s`, `%s`, ..)", entityDescriptor.getEntityName(), numberOfIdsInBatch ); log.tracef( "#loadEntitiesById(`%s`, `%s`, ..)", getLoadable().getEntityName(), numberOfIdsInBatch );
} }
final List<JdbcParameter> jdbcParameters = new ArrayList<>( numberOfIdsInBatch * idJdbcTypeCount); final List<JdbcParameter> jdbcParameters = new ArrayList<>( numberOfIdsInBatch * idJdbcTypeCount);
@ -247,10 +224,10 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
session.getLoadQueryInfluencers(), session.getLoadQueryInfluencers(),
lockOptions, lockOptions,
jdbcParameters::add, jdbcParameters::add,
sessionFactory getSessionFactory()
); );
final JdbcServices jdbcServices = sessionFactory.getJdbcServices(); final JdbcServices jdbcServices = getSessionFactory().getJdbcServices();
final JdbcEnvironment jdbcEnvironment = jdbcServices.getJdbcEnvironment(); final JdbcEnvironment jdbcEnvironment = jdbcServices.getJdbcEnvironment();
final SqlAstTranslatorFactory sqlAstTranslatorFactory = jdbcEnvironment.getSqlAstTranslatorFactory(); final SqlAstTranslatorFactory sqlAstTranslatorFactory = jdbcEnvironment.getSqlAstTranslatorFactory();
@ -263,7 +240,7 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
offset += jdbcParameterBindings.registerParametersForEachJdbcValue( offset += jdbcParameterBindings.registerParametersForEachJdbcValue(
id, id,
offset, offset,
entityDescriptor.getIdentifierMapping(), getLoadable().getIdentifierMapping(),
jdbcParameters, jdbcParameters,
session session
); );
@ -271,11 +248,11 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
// we should have used all the JdbcParameter references (created bindings for all) // we should have used all the JdbcParameter references (created bindings for all)
assert offset == jdbcParameters.size(); assert offset == jdbcParameters.size();
final JdbcOperationQuerySelect jdbcSelect = sqlAstTranslatorFactory.buildSelectTranslator( sessionFactory, sqlAst ) final JdbcOperationQuerySelect jdbcSelect = sqlAstTranslatorFactory.buildSelectTranslator( getSessionFactory(), sqlAst )
.translate( jdbcParameterBindings, QueryOptions.NONE ); .translate( jdbcParameterBindings, QueryOptions.NONE );
final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler; final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler;
if ( entityDescriptor.hasSubselectLoadableCollections() ) { if ( getLoadable().getEntityPersister().hasSubselectLoadableCollections() ) {
subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler( subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler(
session.getPersistenceContext().getBatchFetchQueue(), session.getPersistenceContext().getBatchFetchQueue(),
sqlAst, sqlAst,
@ -297,19 +274,21 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
} }
private List<T> performSingleMultiLoad(Object id, LockOptions lockOptions, SharedSessionContractImplementor session) { private List<T> performSingleMultiLoad(Object id, LockOptions lockOptions, SharedSessionContractImplementor session) {
T loaded = (T) entityDescriptor.load( id, null, lockOptions, session ); //noinspection unchecked
T loaded = (T) getLoadable().getEntityPersister().load( id, null, lockOptions, session );
return Collections.singletonList( loaded ); return Collections.singletonList( loaded );
} }
private List<T> performUnorderedMultiLoad( @Override
protected List<T> performUnorderedMultiLoad(
Object[] ids, Object[] ids,
EventSource session, MultiIdLoadOptions loadOptions,
MultiIdLoadOptions loadOptions) { EventSource session) {
assert !loadOptions.isOrderReturnEnabled(); assert !loadOptions.isOrderReturnEnabled();
assert ids != null; assert ids != null;
if ( log.isTraceEnabled() ) { if ( log.isTraceEnabled() ) {
log.tracef( "#performUnorderedMultiLoad(`%s`, ..)", entityDescriptor.getEntityName() ); log.tracef( "#performUnorderedMultiLoad(`%s`, ..)", getLoadable().getEntityName() );
} }
final List<T> result = CollectionHelper.arrayList( ids.length ); final List<T> result = CollectionHelper.arrayList( ids.length );
@ -327,23 +306,23 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
boolean foundAnyManagedEntities = false; boolean foundAnyManagedEntities = false;
final List<Object> nonManagedIds = new ArrayList<>(); final List<Object> nonManagedIds = new ArrayList<>();
final boolean coerce = !sessionFactory.getJpaMetamodel().getJpaCompliance().isLoadByIdComplianceEnabled(); final boolean coerce = !getSessionFactory().getJpaMetamodel().getJpaCompliance().isLoadByIdComplianceEnabled();
for ( int i = 0; i < ids.length; i++ ) { for ( int i = 0; i < ids.length; i++ ) {
final Object id; final Object id;
if ( coerce ) { if ( coerce ) {
id = entityDescriptor.getIdentifierMapping().getJavaType().coerce( ids[i], session ); id = getLoadable().getIdentifierMapping().getJavaType().coerce( ids[i], session );
} }
else { else {
id = ids[i]; id = ids[i];
} }
final EntityKey entityKey = new EntityKey( id, entityDescriptor ); final EntityKey entityKey = new EntityKey( id, getLoadable().getEntityPersister() );
LoadEvent loadEvent = new LoadEvent( LoadEvent loadEvent = new LoadEvent(
id, id,
entityDescriptor.getMappedClass().getName(), getLoadable().getJavaType().getJavaTypeClass().getName(),
lockOptions, lockOptions,
session, session,
getReadOnlyFromLoadQueryInfluencers( session ) LoaderHelper.getReadOnlyFromLoadQueryInfluencers( session )
); );
Object managedEntity = null; Object managedEntity = null;
@ -370,7 +349,7 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
if ( managedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) { if ( managedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) {
managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache( managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent, loadEvent,
entityDescriptor, getLoadable().getEntityPersister(),
entityKey entityKey
); );
} }
@ -409,10 +388,10 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
maxBatchSize = loadOptions.getBatchSize(); maxBatchSize = loadOptions.getBatchSize();
} }
else { else {
maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getDefaultBatchLoadSizingStrategy().determineOptimalBatchLoadSize( maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getBatchLoadSizingStrategy().determineOptimalBatchLoadSize(
entityDescriptor.getIdentifierType().getColumnSpan( session.getFactory() ), getIdentifierMapping().getJdbcTypeCount(),
numberOfIdsLeft, numberOfIdsLeft,
sessionFactory.getSessionFactoryOptions().inClauseParameterPaddingEnabled() getSessionFactory().getSessionFactoryOptions().inClauseParameterPaddingEnabled()
); );
} }
@ -434,13 +413,4 @@ public class MultiIdLoaderStandard<T> implements MultiIdEntityLoader<T> {
return result; return result;
} }
private Boolean getReadOnlyFromLoadQueryInfluencers(SharedSessionContractImplementor session) {
Boolean readOnly = null;
final LoadQueryInfluencers loadQueryInfluencers = session.getLoadQueryInfluencers();
if ( loadQueryInfluencers != null ) {
readOnly = loadQueryInfluencers.getReadOnly();
}
return readOnly;
}
} }

View File

@ -0,0 +1,162 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import java.util.List;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.metamodel.mapping.Bindable;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.spi.ExecutionContext;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
/**
* When the number of ids to initialize exceeds a certain threshold, IN-predicate based
* {@linkplain org.hibernate.loader.ast.spi.MultiKeyLoader multi-key loaders} will break
* the initialization into "chunks".
*
* @author Steve Ebersole
*/
public class MultiKeyLoadChunker<K> {
@FunctionalInterface
interface SqlExecutionContextCreator {
ExecutionContext createContext(
JdbcParameterBindings parameterBindings,
SharedSessionContractImplementor session);
}
@FunctionalInterface
interface KeyCollector<K> {
void collect(K key, int relativePosition, int absolutePosition);
}
@FunctionalInterface
interface ChunkStartListener {
void chunkStartNotification(int startIndex);
}
@FunctionalInterface
interface ChunkBoundaryListener {
void chunkBoundaryNotification(int startIndex, int nonNullElementCount);
}
private final int chunkSize;
private final int keyColumnCount;
private final Bindable bindable;
private final List<JdbcParameter> jdbcParameters;
private final SelectStatement sqlAst;
private final JdbcOperationQuerySelect jdbcSelect;
public MultiKeyLoadChunker(
int chunkSize,
int keyColumnCount,
Bindable bindable,
List<JdbcParameter> jdbcParameters,
SelectStatement sqlAst,
JdbcOperationQuerySelect jdbcSelect) {
this.chunkSize = chunkSize;
this.keyColumnCount = keyColumnCount;
this.bindable = bindable;
this.jdbcParameters = jdbcParameters;
this.sqlAst = sqlAst;
this.jdbcSelect = jdbcSelect;
}
/**
* Process the chunks
*
* @param keys The group of keys to be initialized
* @param nonNullElementCount The number of non-null values in {@code keys}, which will be
* less than or equal to the number of {@code keys}
* @param startListener Notifications that processing a chunk has started
* @param keyCollector Called for each key as it is processed
* @param boundaryListener Notifications that processing a chunk has completed
*/
public void processChunks(
K[] keys,
int nonNullElementCount,
SqlExecutionContextCreator sqlExecutionContextCreator,
KeyCollector<K> keyCollector,
ChunkStartListener startListener,
ChunkBoundaryListener boundaryListener,
SharedSessionContractImplementor session) {
int numberOfKeysLeft = nonNullElementCount;
int start = 0;
while ( numberOfKeysLeft > 0 ) {
processChunk( keys, start, sqlExecutionContextCreator, keyCollector, startListener, boundaryListener, session );
start += chunkSize;
numberOfKeysLeft -= chunkSize;
}
}
private void processChunk(
K[] keys,
int startIndex,
SqlExecutionContextCreator sqlExecutionContextCreator,
KeyCollector<K> keyCollector,
ChunkStartListener startListener,
ChunkBoundaryListener boundaryListener,
SharedSessionContractImplementor session) {
startListener.chunkStartNotification( startIndex );
final int parameterCount = chunkSize * keyColumnCount;
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl( parameterCount );
int nonNullCounter = 0;
int bindCount = 0;
for ( int i = 0; i < chunkSize; i++ ) {
// the position within `K[] keys`
final int keyPosition = i + startIndex;
final K value;
if ( keyPosition >= keys.length ) {
value = null;
}
else {
value = keys[keyPosition];
}
keyCollector.collect( value, i, keyPosition );
if ( value != null ) {
nonNullCounter++;
}
bindCount += jdbcParameterBindings.registerParametersForEachJdbcValue(
value,
bindCount,
bindable,
jdbcParameters,
session
);
}
assert bindCount == jdbcParameters.size();
if ( nonNullCounter == 0 ) {
// there are no non-null keys in the chunk
return;
}
session.getFactory().getJdbcServices().getJdbcSelectExecutor().list(
jdbcSelect,
jdbcParameterBindings,
sqlExecutionContextCreator.createContext( jdbcParameterBindings, session ),
RowTransformerStandardImpl.instance(),
ListResultsConsumer.UniqueSemantic.FILTER
);
boundaryListener.chunkBoundaryNotification( startIndex, nonNullCounter );
}
}
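For reference, a minimal, self-contained sketch of the chunking arithmetic used by `processChunks`/`processChunk` above, with the JDBC execution reduced to a print statement; the key values and chunk size are illustrative only and nothing here depends on Hibernate classes.

[source]
----
public class ChunkingSketch {
	public static void main(String[] args) {
		final Integer[] keys = { 1, 2, 3, 4, 5, 6, 7 };   // 7 keys to load
		final int chunkSize = 5;                          // as decided by the sizing strategy

		int numberOfKeysLeft = keys.length;
		int start = 0;
		while ( numberOfKeysLeft > 0 ) {
			int nonNullCounter = 0;
			for ( int i = 0; i < chunkSize; i++ ) {
				final int keyPosition = i + start;
				// positions past the end of the array are padded with null, as in processChunk
				final Integer value = keyPosition < keys.length ? keys[keyPosition] : null;
				if ( value != null ) {
					nonNullCounter++;
				}
			}
			if ( nonNullCounter > 0 ) {
				// in MultiKeyLoadChunker this is where the JDBC select is executed
				System.out.printf( "chunk starting at %d binds %d non-null key(s)%n", start, nonNullCounter );
			}
			start += chunkSize;
			numberOfKeysLeft -= chunkSize;
		}
		// prints:
		//   chunk starting at 0 binds 5 non-null key(s)
		//   chunk starting at 5 binds 2 non-null key(s)
	}
}
----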

View File

@ -0,0 +1,59 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
/**
* @author Steve Ebersole
*/
public class MultiKeyLoadHelper {
private MultiKeyLoadHelper() {
}
public static boolean supportsSqlArrayType(Dialect dialect) {
return dialect.supportsStandardArrays()
&& dialect.getPreferredSqlTypeCodeForArray() == SqlTypes.ARRAY;
}
public static JdbcMapping resolveArrayJdbcMapping(
BasicType<?> arrayBasicType,
JdbcMapping keyMapping,
Class<?> arrayClass,
SessionFactoryImplementor sessionFactory) {
if ( arrayBasicType != null ) {
return arrayBasicType;
}
final TypeConfiguration typeConfiguration = sessionFactory.getTypeConfiguration();
final JavaTypeRegistry javaTypeRegistry = typeConfiguration.getJavaTypeRegistry();
final JavaType<Object> rawArrayJavaType = javaTypeRegistry.resolveDescriptor( arrayClass );
if ( !(rawArrayJavaType instanceof BasicPluralJavaType ) ) {
throw new IllegalArgumentException( "Expecting BasicPluralJavaType for array class `" + arrayClass.getName() + "`, but got `" + rawArrayJavaType + "`" );
}
final BasicPluralJavaType<?> arrayJavaType = (BasicPluralJavaType<?>) rawArrayJavaType;
//noinspection unchecked,rawtypes
return arrayJavaType.resolveType(
typeConfiguration,
sessionFactory.getJdbcServices().getDialect(),
// potentially problematic - custom id type
(BasicType) keyMapping,
null,
typeConfiguration.getCurrentBaseSqlTypeIndicators()
);
}
}
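A short usage sketch for `supportsSqlArrayType`. The concrete dialect (`PostgreSQLDialect`) is an assumption chosen for illustration; whether a given dialect/version actually reports standard ARRAY support is decided entirely by the two `Dialect` methods checked above.

[source]
----
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;

public class ArraySupportCheck {
	public static void main(String[] args) {
		// Assumption: this dialect advertises standard ARRAY support; if it does not,
		// the multi-key loaders simply fall back to IN-predicate batching.
		final Dialect dialect = new PostgreSQLDialect();
		if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
			System.out.println( "multi-key loads can use a single ARRAY-valued parameter" );
		}
		else {
			System.out.println( "multi-key loads fall back to IN-predicate batching" );
		}
	}
}
----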

View File

@ -0,0 +1,34 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.loader.ast.internal;
import org.hibernate.internal.log.SubSystemLogging;
import org.jboss.logging.Logger;
/**
* Logging related to loading a {@linkplain org.hibernate.loader.ast.spi.Loadable loadable}
* by multiple "keys". The key can be primary, foreign or natural.
*
* @see org.hibernate.annotations.BatchSize
* @see org.hibernate.Session#byMultipleIds
* @see org.hibernate.Session#byMultipleNaturalId
*
* @author Steve Ebersole
*/
@SubSystemLogging(
name = MultiKeyLoadLogging.LOGGER_NAME,
description = "Logging related to multi-key loading of entity and collection references"
)
public interface MultiKeyLoadLogging {
String LOGGER_NAME = SubSystemLogging.BASE + ".loader.multi";
Logger MULTI_KEY_LOAD_LOGGER = Logger.getLogger( LOGGER_NAME );
boolean MULTI_KEY_LOAD_TRACE_ENABLED = MULTI_KEY_LOAD_LOGGER.isTraceEnabled();
boolean MULTI_KEY_LOAD_DEBUG_ENABLED = MULTI_KEY_LOAD_LOGGER.isDebugEnabled();
}

View File

@ -0,0 +1,120 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.loader.ast.internal;
import java.util.Collections;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.MultiNaturalIdLoadOptions;
import org.hibernate.loader.ast.spi.MultiNaturalIdLoader;
import org.hibernate.loader.ast.spi.SqlArrayMultiKeyLoader;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.internal.BasicAttributeMapping;
import org.hibernate.metamodel.mapping.internal.SimpleNaturalIdMapping;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.JdbcParameterImpl;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
/**
* MultiNaturalIdLoader implementation using a single SQL ARRAY parameter to pass the natural-id values
*/
public class MultiNaturalIdLoaderArrayParam<E> implements MultiNaturalIdLoader<E>, SqlArrayMultiKeyLoader {
private final EntityMappingType entityDescriptor;
private final Class<?> keyArrayClass;
public MultiNaturalIdLoaderArrayParam(EntityMappingType entityDescriptor) {
assert entityDescriptor.getNaturalIdMapping() instanceof SimpleNaturalIdMapping;
this.entityDescriptor = entityDescriptor;
final Class<?> keyClass = entityDescriptor.getNaturalIdMapping().getJavaType().getJavaTypeClass();
this.keyArrayClass = LoaderHelper.createTypedArray( keyClass, 0 ).getClass();
}
@Override
public EntityMappingType getLoadable() {
return entityDescriptor;
}
protected SimpleNaturalIdMapping getNaturalIdMapping() {
return (SimpleNaturalIdMapping) entityDescriptor.getNaturalIdMapping();
}
protected BasicAttributeMapping getNaturalIdAttribute() {
return (BasicAttributeMapping) getNaturalIdMapping().asAttributeMapping();
}
@Override
public <K> List<E> multiLoad(K[] naturalIds, MultiNaturalIdLoadOptions loadOptions, SharedSessionContractImplementor session) {
if ( naturalIds == null ) {
throw new IllegalArgumentException( "`naturalIds` is null" );
}
if ( naturalIds.length == 0 ) {
return Collections.emptyList();
}
if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef( "MultiNaturalIdLoaderArrayParam#multiLoadStarting - `%s`", entityDescriptor.getEntityName() );
}
final SessionFactoryImplementor sessionFactory = session.getFactory();
naturalIds = LoaderHelper.normalizeKeys( naturalIds, getNaturalIdAttribute(), session, sessionFactory );
final LockOptions lockOptions = (loadOptions.getLockOptions() == null)
? new LockOptions( LockMode.NONE )
: loadOptions.getLockOptions();
final BasicTypeRegistry basicTypeRegistry = sessionFactory.getTypeConfiguration().getBasicTypeRegistry();
final BasicType<?> arrayBasicType = basicTypeRegistry.getRegisteredType( keyArrayClass );
final JdbcMapping arrayJdbcMapping = MultiKeyLoadHelper.resolveArrayJdbcMapping(
arrayBasicType,
getNaturalIdMapping().getSingleJdbcMapping(),
keyArrayClass,
sessionFactory
);
final JdbcParameter jdbcParameter = new JdbcParameterImpl( arrayJdbcMapping );
final SelectStatement sqlAst = LoaderSelectBuilder.createSelectBySingleArrayParameter(
getLoadable(),
getNaturalIdAttribute(),
session.getLoadQueryInfluencers(),
lockOptions,
jdbcParameter,
sessionFactory
);
final JdbcOperationQuerySelect jdbcSelectOperation = sessionFactory.getJdbcServices()
.getJdbcEnvironment()
.getSqlAstTranslatorFactory()
.buildSelectTranslator( sessionFactory, sqlAst )
.translate( JdbcParameterBindings.NO_BINDINGS, QueryOptions.NONE );
return LoaderHelper.loadByArrayParameter(
naturalIds,
sqlAst,
jdbcSelectOperation,
jdbcParameter,
arrayJdbcMapping,
null,
null,
lockOptions,
session.isDefaultReadOnly(),
session
);
}
}
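From the API side, this loader sits behind `Session#byMultipleNaturalId`. A hedged usage sketch, assuming a hypothetical `Book` entity with a single `@NaturalId` attribute; the ISBN values and the batch-size hint are illustrative, not taken from this commit.

[source]
----
import java.util.List;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.Session;
import org.hibernate.annotations.NaturalId;

public class MultiNaturalIdLoadExample {

	// Hypothetical entity with a simple (single-attribute) natural id
	@Entity
	static class Book {
		@Id
		Long id;
		@NaturalId
		String isbn;
	}

	// Loads several Book instances by natural id in one go; with the array-param loader
	// this becomes a single query carrying one ARRAY-valued JDBC parameter.
	static List<Book> loadByIsbn(Session session) {
		return session.byMultipleNaturalId( Book.class )
				.withBatchSize( 10 )   // optional hint; otherwise the dialect's sizing strategy decides
				.multiLoad( "0-13-468599-1", "0-321-35668-3" );
	}
}
----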

View File

@ -15,21 +15,17 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.MultiNaturalIdLoadOptions; import org.hibernate.loader.ast.spi.MultiNaturalIdLoadOptions;
import org.hibernate.loader.ast.spi.MultiNaturalIdLoader; import org.hibernate.loader.ast.spi.MultiNaturalIdLoader;
import org.hibernate.loader.ast.spi.SqlInPredicateMultiKeyLoader;
import org.hibernate.metamodel.mapping.EntityMappingType; import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.sql.results.LoadingLogger; import org.hibernate.sql.results.LoadingLogger;
/** /**
* Standard MultiNaturalIdLoader implementation * MultiNaturalIdLoader implementation using SQL IN predicate to specify the ids
*/ */
public class MultiNaturalIdLoaderStandard<E> implements MultiNaturalIdLoader<E> { public class MultiNaturalIdLoaderInPredicate<E> implements MultiNaturalIdLoader<E>, SqlInPredicateMultiKeyLoader {
// todo (6.0) : much of the execution logic here is borrowed from `org.hibernate.loader.ast.internal.MultiIdEntityLoaderStandardImpl`
// - consider ways to consolidate/share logic
// - actually, org.hibernate.loader.ast.internal.MultiNaturalIdLoadingBatcher is pretty close
private final EntityMappingType entityDescriptor; private final EntityMappingType entityDescriptor;
public MultiNaturalIdLoaderStandard(EntityMappingType entityDescriptor) { public MultiNaturalIdLoaderInPredicate(EntityMappingType entityDescriptor) {
this.entityDescriptor = entityDescriptor; this.entityDescriptor = entityDescriptor;
} }
@ -54,7 +50,7 @@ public class MultiNaturalIdLoaderStandard<E> implements MultiNaturalIdLoader<E>
maxBatchSize = options.getBatchSize(); maxBatchSize = options.getBatchSize();
} }
else { else {
maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getDefaultBatchLoadSizingStrategy().determineOptimalBatchLoadSize( maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getMultiKeyLoadSizingStrategy().determineOptimalBatchLoadSize(
entityDescriptor.getNaturalIdMapping().getJdbcTypeCount(), entityDescriptor.getNaturalIdMapping().getJdbcTypeCount(),
naturalIds.length, naturalIds.length,
sessionFactory.getSessionFactoryOptions().inClauseParameterPaddingEnabled() sessionFactory.getSessionFactoryOptions().inClauseParameterPaddingEnabled()

View File

@ -1,243 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.loader.ast.internal;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.LockOptions;
import org.hibernate.engine.internal.BatchFetchQueueHelper;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.query.spi.QueryOptionsAdapter;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.internal.BaseExecutionContext;
import org.hibernate.sql.exec.internal.JdbcParameterBindingsImpl;
import org.hibernate.sql.exec.spi.ExecutionContext;
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.results.graph.entity.LoadingEntityEntry;
import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class SingleIdEntityLoaderDynamicBatch<T> extends SingleIdEntityLoaderSupport<T> {
private static final Logger log = Logger.getLogger( SingleIdEntityLoaderDynamicBatch.class );
private final int maxBatchSize;
private SingleIdEntityLoaderStandardImpl<T> singleIdLoader;
public SingleIdEntityLoaderDynamicBatch(
EntityMappingType entityDescriptor,
int maxBatchSize,
SessionFactoryImplementor sessionFactory) {
super( entityDescriptor, sessionFactory );
this.maxBatchSize = maxBatchSize;
}
@Override
public T load(Object pkValue, LockOptions lockOptions, Boolean readOnly, SharedSessionContractImplementor session) {
return load( pkValue, null, lockOptions, readOnly, session );
}
@Override
public T load(
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
final Object[] batchIds = session.getPersistenceContextInternal()
.getBatchFetchQueue()
.getBatchLoadableEntityIds( getLoadable(), pkValue, maxBatchSize );
final int numberOfIds = ArrayHelper.countNonNull( batchIds );
if ( numberOfIds <= 1 ) {
initializeSingleIdLoaderIfNeeded( session );
final T result = singleIdLoader.load( pkValue, entityInstance, lockOptions, readOnly, session );
if ( result == null ) {
// There was no entity with the specified ID. Make sure the EntityKey does not remain
// in the batch to avoid including it in future batches that get executed.
BatchFetchQueueHelper.removeBatchLoadableEntityKey( pkValue, getLoadable(), session );
}
return result;
}
final Object[] idsToLoad = new Object[numberOfIds];
System.arraycopy( batchIds, 0, idsToLoad, 0, numberOfIds );
if ( log.isDebugEnabled() ) {
log.debugf( "Batch loading entity [%s] : %s", getLoadable().getEntityName(), idsToLoad );
}
final List<JdbcParameter> jdbcParameters = new ArrayList<>();
final SelectStatement sqlAst = LoaderSelectBuilder.createSelect(
getLoadable(),
// null here means to select everything
null,
getLoadable().getIdentifierMapping(),
null,
numberOfIds,
session.getLoadQueryInfluencers(),
lockOptions,
jdbcParameters::add,
session.getFactory()
);
final SessionFactoryImplementor sessionFactory = session.getFactory();
final JdbcServices jdbcServices = sessionFactory.getJdbcServices();
final JdbcEnvironment jdbcEnvironment = jdbcServices.getJdbcEnvironment();
final SqlAstTranslatorFactory sqlAstTranslatorFactory = jdbcEnvironment.getSqlAstTranslatorFactory();
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl(
getLoadable().getIdentifierMapping().getJdbcTypeCount()
);
int offset = 0;
for ( int i = 0; i < numberOfIds; i++ ) {
offset += jdbcParameterBindings.registerParametersForEachJdbcValue(
idsToLoad[i],
offset,
getLoadable().getIdentifierMapping(),
jdbcParameters,
session
);
}
assert offset == jdbcParameters.size();
final JdbcOperationQuerySelect jdbcSelect = sqlAstTranslatorFactory
.buildSelectTranslator( sessionFactory, sqlAst )
.translate( jdbcParameterBindings, QueryOptions.NONE );
final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler = SubselectFetch.createRegistrationHandler(
session.getPersistenceContext().getBatchFetchQueue(),
sqlAst,
jdbcParameters,
jdbcParameterBindings
);
session.getJdbcServices().getJdbcSelectExecutor().list(
jdbcSelect,
jdbcParameterBindings,
getExecutionContext(
pkValue,
entityInstance,
readOnly,
lockOptions,
session,
subSelectFetchableKeysHandler
),
RowTransformerStandardImpl.instance(),
ListResultsConsumer.UniqueSemantic.FILTER
);
//noinspection ForLoopReplaceableByForEach
for ( int i = 0; i < idsToLoad.length; i++ ) {
final Object id = idsToLoad[i];
// found or not, remove the key from the batch-fetch queue
BatchFetchQueueHelper.removeBatchLoadableEntityKey( id, getLoadable(), session );
}
final EntityKey entityKey = session.generateEntityKey( pkValue, getLoadable().getEntityPersister() );
//noinspection unchecked
return (T) session.getPersistenceContext().getEntity( entityKey );
}
private ExecutionContext getExecutionContext(
Object entityId,
Object entityInstance,
Boolean readOnly,
LockOptions lockOptions,
SharedSessionContractImplementor session,
SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler) {
return new SingleIdExecutionContext( session,
entityInstance,
entityId,
readOnly,
lockOptions,
subSelectFetchableKeysHandler
);
}
private void initializeSingleIdLoaderIfNeeded(SharedSessionContractImplementor session) {
if ( singleIdLoader == null ) {
singleIdLoader = new SingleIdEntityLoaderStandardImpl<>( getLoadable(), session.getFactory() );
singleIdLoader.prepare();
}
}
private static class SingleIdExecutionContext extends BaseExecutionContext {
private final Object entityInstance;
private final Object entityId;
private final Boolean readOnly;
private final LockOptions lockOptions;
private final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler;
public SingleIdExecutionContext(
SharedSessionContractImplementor session,
Object entityInstance,
Object entityId,
Boolean readOnly,
LockOptions lockOptions,
SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler) {
super( session );
this.entityInstance = entityInstance;
this.entityId = entityId;
this.readOnly = readOnly;
this.lockOptions = lockOptions;
this.subSelectFetchableKeysHandler = subSelectFetchableKeysHandler;
}
@Override
public Object getEntityInstance() {
return entityInstance;
}
@Override
public Object getEntityId() {
return entityId;
}
@Override
public QueryOptions getQueryOptions() {
return new QueryOptionsAdapter() {
@Override
public Boolean isReadOnly() {
return readOnly;
}
@Override
public LockOptions getLockOptions() {
return lockOptions;
}
};
}
@Override
public void registerLoadingEntityEntry(EntityKey entityKey, LoadingEntityEntry entry) {
subSelectFetchableKeysHandler.addKey( entityKey, entry );
}
}
}

View File

@ -0,0 +1,73 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.SubselectFetch;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.query.spi.QueryOptionsAdapter;
import org.hibernate.sql.exec.internal.BaseExecutionContext;
import org.hibernate.sql.results.graph.entity.LoadingEntityEntry;
/**
* @author Steve Ebersole
*/
class SingleIdExecutionContext extends BaseExecutionContext {
private final Object entityInstance;
private final Object entityId;
private final Boolean readOnly;
private final LockOptions lockOptions;
private final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler;
public SingleIdExecutionContext(
Object entityId,
Object entityInstance,
Boolean readOnly,
LockOptions lockOptions,
SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler,
SharedSessionContractImplementor session) {
super( session );
this.entityInstance = entityInstance;
this.entityId = entityId;
this.readOnly = readOnly;
this.lockOptions = lockOptions;
this.subSelectFetchableKeysHandler = subSelectFetchableKeysHandler;
}
@Override
public Object getEntityInstance() {
return entityInstance;
}
@Override
public Object getEntityId() {
return entityId;
}
@Override
public QueryOptions getQueryOptions() {
return new QueryOptionsAdapter() {
@Override
public Boolean isReadOnly() {
return readOnly;
}
@Override
public LockOptions getLockOptions() {
return lockOptions;
}
};
}
@Override
public void registerLoadingEntityEntry(EntityKey entityKey, LoadingEntityEntry entry) {
subSelectFetchableKeysHandler.addKey( entityKey, entry );
}
}

View File

@ -14,6 +14,7 @@ import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.Loadable; import org.hibernate.loader.ast.spi.Loadable;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.ModelPart; import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.internal.SimpleQueryOptions; import org.hibernate.query.internal.SimpleQueryOptions;
import org.hibernate.query.spi.QueryOptions; import org.hibernate.query.spi.QueryOptions;
@ -41,20 +42,20 @@ import org.hibernate.sql.results.spi.RowTransformer;
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public class SingleIdLoadPlan<T> implements SingleEntityLoadPlan { public class SingleIdLoadPlan<T> implements SingleEntityLoadPlan {
private final org.hibernate.persister.entity.Loadable persister; private final EntityMappingType entityMappingType;
private final ModelPart restrictivePart; private final ModelPart restrictivePart;
private final LockOptions lockOptions; private final LockOptions lockOptions;
private final JdbcOperationQuerySelect jdbcSelect; private final JdbcOperationQuerySelect jdbcSelect;
private final List<JdbcParameter> jdbcParameters; private final List<JdbcParameter> jdbcParameters;
public SingleIdLoadPlan( public SingleIdLoadPlan(
org.hibernate.persister.entity.Loadable persister, EntityMappingType entityMappingType,
ModelPart restrictivePart, ModelPart restrictivePart,
SelectStatement sqlAst, SelectStatement sqlAst,
List<JdbcParameter> jdbcParameters, List<JdbcParameter> jdbcParameters,
LockOptions lockOptions, LockOptions lockOptions,
SessionFactoryImplementor sessionFactory) { SessionFactoryImplementor sessionFactory) {
this.persister = persister; this.entityMappingType = entityMappingType;
this.restrictivePart = restrictivePart; this.restrictivePart = restrictivePart;
this.lockOptions = lockOptions.makeCopy(); this.lockOptions = lockOptions.makeCopy();
this.jdbcParameters = jdbcParameters; this.jdbcParameters = jdbcParameters;
@ -83,7 +84,7 @@ public class SingleIdLoadPlan<T> implements SingleEntityLoadPlan {
@Override @Override
public Loadable getLoadable() { public Loadable getLoadable() {
return persister; return entityMappingType;
} }
@Override @Override
@ -154,8 +155,8 @@ public class SingleIdLoadPlan<T> implements SingleEntityLoadPlan {
} }
final T entity = list.get( 0 ); final T entity = list.get( 0 );
if ( persister != null ) { if ( entityMappingType != null ) {
callback.invokeAfterLoadActions( session, entity, persister ); callback.invokeAfterLoadActions( entity, entityMappingType, session );
} }
return entity; return entity;
} }

View File

@ -73,7 +73,6 @@ public class SingleUniqueKeyEntityLoaderStandard<T> implements SingleUniqueKeyEn
Collections.emptyList(), Collections.emptyList(),
uniqueKeyAttribute, uniqueKeyAttribute,
null, null,
1,
LoadQueryInfluencers.NONE, LoadQueryInfluencers.NONE,
LockOptions.NONE, LockOptions.NONE,
jdbcParameters::add, jdbcParameters::add,
@ -129,7 +128,6 @@ public class SingleUniqueKeyEntityLoaderStandard<T> implements SingleUniqueKeyEn
Collections.singletonList( entityDescriptor.getIdentifierMapping() ), Collections.singletonList( entityDescriptor.getIdentifierMapping() ),
uniqueKeyAttribute, uniqueKeyAttribute,
null, null,
1,
LoadQueryInfluencers.NONE, LoadQueryInfluencers.NONE,
LockOptions.NONE, LockOptions.NONE,
jdbcParameters::add, jdbcParameters::add,

View File

@ -0,0 +1,74 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.internal;
import java.util.Map;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.loader.ast.spi.BatchLoaderFactory;
import org.hibernate.loader.ast.spi.CollectionBatchLoader;
import org.hibernate.loader.ast.spi.EntityBatchLoader;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.Type;
/**
* Standard {@link BatchLoaderFactory} implementation
*
* @author Steve Ebersole
*/
public class StandardBatchLoaderFactory implements BatchLoaderFactory {
@SuppressWarnings("unused")
public StandardBatchLoaderFactory(Map<String, Object> configurationValues, ServiceRegistryImplementor registry) {
}
@Override
public <T> EntityBatchLoader<T> createEntityBatchLoader(
int domainBatchSize, EntityMappingType entityDescriptor,
SessionFactoryImplementor factory) {
final Dialect dialect = factory.getJdbcServices().getDialect();
// NOTE : don't use the EntityIdentifierMapping here because it will not be known until later
final Type identifierType = entityDescriptor.getEntityPersister().getIdentifierType();
final int idColumnCount = identifierType.getColumnSpan( factory );
if ( idColumnCount == 1
&& MultiKeyLoadHelper.supportsSqlArrayType( dialect )
&& identifierType instanceof BasicType ) {
// we can use a single ARRAY parameter to send all the ids
return new EntityBatchLoaderArrayParam<>( domainBatchSize, entityDescriptor, factory );
}
final int optimalBatchSize = dialect
.getBatchLoadSizingStrategy()
.determineOptimalBatchLoadSize( idColumnCount, domainBatchSize, false );
return new EntityBatchLoaderInPredicate<>( domainBatchSize, optimalBatchSize, entityDescriptor, factory );
}
@Override
public CollectionBatchLoader createCollectionBatchLoader(
int domainBatchSize,
LoadQueryInfluencers influencers,
PluralAttributeMapping attributeMapping,
SessionFactoryImplementor factory) {
final Dialect dialect = factory.getJdbcServices().getDialect();
final int columnCount = attributeMapping.getKeyDescriptor().getJdbcTypeCount();
if ( columnCount == 1
&& dialect.supportsStandardArrays()
&& dialect.getPreferredSqlTypeCodeForArray() == SqlTypes.ARRAY ) {
// we can use a single ARRAY parameter to send all the ids
return new CollectionBatchLoaderArrayParam( domainBatchSize, influencers, attributeMapping, factory );
}
return new CollectionBatchLoaderInPredicate( domainBatchSize, influencers, attributeMapping, factory );
}
}

View File

@ -7,11 +7,25 @@
package org.hibernate.loader.ast.spi; package org.hibernate.loader.ast.spi;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.persister.entity.Loadable; import org.hibernate.metamodel.mapping.EntityMappingType;
/** /**
* An action to be performed after an entity has been loaded, e.g. applying locks.
*
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public interface AfterLoadAction { public interface AfterLoadAction {
void afterLoad(SharedSessionContractImplementor session, Object entity, Loadable persister); /**
* The action trigger - the {@code entity} is being loaded
*/
void afterLoad(Object entity, EntityMappingType entityMappingType, SharedSessionContractImplementor session);
/**
* @deprecated Use the {@linkplain #afterLoad(Object, EntityMappingType, SharedSessionContractImplementor) updated form}
*/
@SuppressWarnings("removal")
@Deprecated(since = "6", forRemoval = true)
default void afterLoad(SharedSessionContractImplementor session, Object entity, org.hibernate.persister.entity.Loadable persister) {
afterLoad( entity, persister, session );
}
} }
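Because the deprecated overload now has a default implementation, the interface keeps a single abstract method and a callback can be written as a lambda against the new signature. A minimal sketch (the logging body is illustrative):

[source]
----
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.AfterLoadAction;
import org.hibernate.metamodel.mapping.EntityMappingType;

public class AfterLoadActionExample {
	// The deprecated Loadable-based overload delegates here, so only the new
	// signature needs to be implemented going forward.
	static final AfterLoadAction LOG_AFTER_LOAD =
			(Object entity, EntityMappingType entityMappingType, SharedSessionContractImplementor session) ->
					System.out.println( "loaded instance of " + entityMappingType.getEntityName() );
}
----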

View File

@ -0,0 +1,19 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
/**
* Commonality for loading a {@linkplain Loadable loadable} in "batch" (more than one key at a time)
*
* @author Steve Ebersole
*/
public interface BatchLoader extends MultiKeyLoader {
/**
* The total number of {@linkplain Loadable loadable} references that can be initialized per load operation.
*/
int getDomainBatchSize();
}

View File

@ -0,0 +1,44 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.service.Service;
/**
* Factory for {@link BatchLoader} instances
*
* @author Steve Ebersole
*/
public interface BatchLoaderFactory extends Service {
/**
* Create a BatchLoader for batch-loadable entities.
*
* @param domainBatchSize The total number of entities (max) that will need to be initialized
* @param entityDescriptor The entity mapping metadata
*/
<T> EntityBatchLoader<T> createEntityBatchLoader(
int domainBatchSize,
EntityMappingType entityDescriptor,
SessionFactoryImplementor factory);
/**
* Create a BatchLoader for batch-loadable collections.
*
* @param domainBatchSize The total number of collections (max) that will be initialized for any {@link CollectionBatchLoader#load}
* @param influencers Any load query influencers (filters, fetch-profiles, ...) to apply to the SQL
* @param attributeMapping The collection mapping metadata
*/
CollectionBatchLoader createCollectionBatchLoader(
int domainBatchSize,
LoadQueryInfluencers influencers,
PluralAttributeMapping attributeMapping,
SessionFactoryImplementor factory);
}
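A hedged sketch of how this SPI composes: a factory that caps the domain batch size before delegating to `StandardBatchLoaderFactory` (shown earlier in this commit). The cap value is arbitrary, and registering such a factory as a replacement service is not shown here.

[source]
----
import java.util.Map;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.loader.ast.internal.StandardBatchLoaderFactory;
import org.hibernate.loader.ast.spi.BatchLoaderFactory;
import org.hibernate.loader.ast.spi.CollectionBatchLoader;
import org.hibernate.loader.ast.spi.EntityBatchLoader;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.service.spi.ServiceRegistryImplementor;

// Sketch only: caps the effective @BatchSize before delegating to the standard factory.
public class CappedBatchLoaderFactory implements BatchLoaderFactory {
	private static final int MAX_DOMAIN_BATCH_SIZE = 32;   // arbitrary, illustrative cap

	private final StandardBatchLoaderFactory delegate;

	@SuppressWarnings("unused")
	public CappedBatchLoaderFactory(Map<String, Object> configurationValues, ServiceRegistryImplementor registry) {
		this.delegate = new StandardBatchLoaderFactory( configurationValues, registry );
	}

	@Override
	public <T> EntityBatchLoader<T> createEntityBatchLoader(
			int domainBatchSize, EntityMappingType entityDescriptor, SessionFactoryImplementor factory) {
		return delegate.createEntityBatchLoader(
				Math.min( domainBatchSize, MAX_DOMAIN_BATCH_SIZE ), entityDescriptor, factory );
	}

	@Override
	public CollectionBatchLoader createCollectionBatchLoader(
			int domainBatchSize, LoadQueryInfluencers influencers,
			PluralAttributeMapping attributeMapping, SessionFactoryImplementor factory) {
		return delegate.createCollectionBatchLoader(
				Math.min( domainBatchSize, MAX_DOMAIN_BATCH_SIZE ), influencers, attributeMapping, factory );
	}
}
----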

View File

@ -0,0 +1,15 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
/**
* BatchLoader specialization for {@linkplain org.hibernate.metamodel.mapping.PluralAttributeMapping collection} fetching
*
* @author Steve Ebersole
*/
public interface CollectionBatchLoader extends BatchLoader, CollectionLoader {
}

View File

@ -0,0 +1,15 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
/**
* BatchLoader specialization for {@linkplain org.hibernate.metamodel.mapping.EntityMappingType entity} fetching
*
* @author Steve Ebersole
*/
public interface EntityBatchLoader<T> extends BatchLoader, SingleIdEntityLoader<T> {
}

View File

@ -7,9 +7,9 @@
package org.hibernate.loader.ast.spi; package org.hibernate.loader.ast.spi;
/** /**
* Commonality for multi-loading * Commonality for multi-loading an {@linkplain org.hibernate.metamodel.mapping.EntityMappingType entity}
* *
* @param <T> The loaded model part * @param <T> The loaded model part
*/ */
public interface MultiLoader<T> extends EntityLoader { public interface EntityMultiLoader<T> extends EntityLoader, MultiKeyLoader {
} }

View File

@ -28,6 +28,19 @@ public interface Loadable extends ModelPart, RootTableGroupProducer {
*/ */
String getRootPathName(); String getRootPathName();
default boolean isAffectedByInfluencers(LoadQueryInfluencers influencers) {
return isAffectedByEntityGraph( influencers )
|| isAffectedByEnabledFetchProfiles( influencers )
|| isAffectedByEnabledFilters( influencers );
}
default boolean isNotAffectedByInfluencers(LoadQueryInfluencers influencers) {
return !isAffectedByEntityGraph( influencers )
&& !isAffectedByEnabledFetchProfiles( influencers )
&& !isAffectedByEnabledFilters( influencers )
&& influencers.getEnabledCascadingFetchProfile() == null;
}
/** /**
* Whether any of the "influencers" affect this loadable. * Whether any of the "influencers" affect this loadable.
*/ */

View File

@ -8,13 +8,12 @@ package org.hibernate.loader.ast.spi;
import java.util.List; import java.util.List;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.event.spi.EventSource; import org.hibernate.event.spi.EventSource;
/** /**
* Loader subtype for loading multiple entities by multiple identifier values. * Loader subtype for loading multiple entities by multiple identifier values.
*/ */
public interface MultiIdEntityLoader<T> extends MultiLoader<T> { public interface MultiIdEntityLoader<T> extends EntityMultiLoader<T> {
/** /**
* Load multiple entities by id. The exact result depends on the passed options. * Load multiple entities by id. The exact result depends on the passed options.
*/ */

View File

@ -0,0 +1,50 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
import org.hibernate.dialect.Dialect;
/**
* Strategy for determining an optimal size for loading by multiple keys. The
* optimal size is defined as the most appropriate number of key values to load
* in any single SQL query.
*
* @apiNote This is used with IN-list style loading to determine the number
* of keys to encode into the SQL restriction to make sure we do not exceed
* database/driver limits on the number of JDBC parameters. Generally, prefer
* using a SQL ARRAY parameter for the keys instead if the database/driver
* supports it.
*
* @see Dialect#getMultiKeyLoadSizingStrategy()
* @see org.hibernate.annotations.BatchSize
* @see org.hibernate.Session#byMultipleIds
* @see org.hibernate.Session#byMultipleNaturalId
*
* @author Steve Ebersole
*/
@FunctionalInterface
public interface MultiKeyLoadSizingStrategy {
/**
* Determine the optimal batch size (number of key values) to load at a time.
* <p/>
* The return value can be less than the total {@code numberOfKeys} to be loaded, indicating
* that the load should be split across multiple SQL queries. E.g., if we are loading
* 7 keys and the strategy says the optimal size is 5, we will perform 2 queries.
*
* @param numberOfKeyColumns The number of columns to which the key is mapped
* @param numberOfKeys The total number of keys we need to load
* @param inClauseParameterPaddingEnabled See {@link org.hibernate.cfg.AvailableSettings#IN_CLAUSE_PARAMETER_PADDING}
*
* @return The number of keys to load at once. The total number of JDBC parameters needed for that load is
* defined by {@code numberOfKeys} * {@code numberOfKeyColumns}. The strategy should take care to ensure that
* {@code numberOfKeys} * {@code numberOfKeyColumns} does not exceed any database/driver limits on the number
* of parameters allowed in a {@linkplain java.sql.PreparedStatement}.
*/
int determineOptimalBatchLoadSize(int numberOfKeyColumns, int numberOfKeys, boolean inClauseParameterPaddingEnabled);
}
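Since this is a `@FunctionalInterface`, a sizing strategy can be supplied as a lambda. Below is a sketch under an assumed 1000-parameter cap that optionally pads the size up to the next power of two, in the spirit of `IN_CLAUSE_PARAMETER_PADDING`; real dialect strategies derive their limits differently.

[source]
----
import org.hibernate.loader.ast.spi.MultiKeyLoadSizingStrategy;

public class SizingStrategyExample {
	// Hypothetical parameter cap; actual dialects base this on database/driver limits.
	private static final int MAX_PARAMETERS = 1000;

	static final MultiKeyLoadSizingStrategy PADDED = (numberOfKeyColumns, numberOfKeys, paddingEnabled) -> {
		int size = Math.max( 1, Math.min( numberOfKeys, MAX_PARAMETERS / numberOfKeyColumns ) );
		if ( paddingEnabled ) {
			// round up to the next power of two so the generated SQL varies less between calls
			int padded = 1;
			while ( padded < size ) {
				padded <<= 1;
			}
			size = Math.min( padded, MAX_PARAMETERS / numberOfKeyColumns );
		}
		return size;
	};

	public static void main(String[] args) {
		// 7 keys mapped to a single column, no padding -> load all 7 in one query
		System.out.println( PADDED.determineOptimalBatchLoadSize( 1, 7, false ) );   // 7
		// 7 keys, padding enabled -> padded up to 8
		System.out.println( PADDED.determineOptimalBatchLoadSize( 1, 7, true ) );    // 8
		// composite key of 3 columns, 600 keys -> capped at 333 keys per query
		System.out.println( PADDED.determineOptimalBatchLoadSize( 3, 600, false ) ); // 333
	}
}
----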

View File

@ -0,0 +1,16 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
/**
* Loader specialization for loading multiple {@linkplain Loadable loadable}
* references by primary, foreign or natural key.
*
* @author Steve Ebersole
*/
public interface MultiKeyLoader extends Loader {
}

View File

@ -15,7 +15,7 @@ import org.hibernate.engine.spi.SharedSessionContractImplementor;
* *
* @param <E> The entity Java type * @param <E> The entity Java type
*/ */
public interface MultiNaturalIdLoader<E> extends MultiLoader<E> { public interface MultiNaturalIdLoader<E> extends EntityMultiLoader<E> {
/** /**
* Load multiple entities by natural-id. The exact result depends on the passed options. * Load multiple entities by natural-id. The exact result depends on the passed options.
* *

View File

@ -13,7 +13,7 @@ import org.hibernate.engine.spi.SharedSessionContractImplementor;
* *
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public interface NaturalIdLoader<T> extends EntityLoader { public interface NaturalIdLoader<T> extends EntityLoader, MultiKeyLoader {
/** /**
* Perform the load of the entity by its natural-id * Perform the load of the entity by its natural-id

View File

@ -0,0 +1,17 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
/**
* MultiKeyLoader implementation based on a SQL ARRAY valued parameter
*
* @see SqlInPredicateMultiKeyLoader
*
* @author Steve Ebersole
*/
public interface SqlArrayMultiKeyLoader extends MultiKeyLoader {
}

View File

@ -0,0 +1,17 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.loader.ast.spi;
/**
* MultiKeyLoader implementation based on SQL IN predicate
*
* @see SqlArrayMultiKeyLoader
*
* @author Steve Ebersole
*/
public interface SqlInPredicateMultiKeyLoader extends MultiKeyLoader {
}

View File

@ -17,7 +17,7 @@ import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.MetadataBuildingContext; import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataImplementor; import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.internal.util.collections.CollectionHelper; import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.MappingModelCreationLogger; import org.hibernate.metamodel.mapping.MappingModelCreationLogging;
import org.hibernate.resource.beans.internal.FallbackBeanInstanceProducer; import org.hibernate.resource.beans.internal.FallbackBeanInstanceProducer;
import org.hibernate.resource.beans.spi.ManagedBean; import org.hibernate.resource.beans.spi.ManagedBean;
import org.hibernate.resource.beans.spi.ManagedBeanRegistry; import org.hibernate.resource.beans.spi.ManagedBeanRegistry;
@ -34,6 +34,8 @@ import org.hibernate.type.Type;
import org.hibernate.usertype.ParameterizedType; import org.hibernate.usertype.ParameterizedType;
import org.hibernate.usertype.UserCollectionType; import org.hibernate.usertype.UserCollectionType;
import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_MESSAGE_LOGGER;
/** /**
* @author Steve Ebersole * @author Steve Ebersole
*/ */
@ -107,7 +109,7 @@ public final class MappingHelper {
( (ParameterizedType) type ).setParameterValues( parameters == null ? EMPTY_PROPERTIES : parameters ); ( (ParameterizedType) type ).setParameterValues( parameters == null ? EMPTY_PROPERTIES : parameters );
} }
else if ( parameters != null && !parameters.isEmpty() ) { else if ( parameters != null && !parameters.isEmpty() ) {
MappingModelCreationLogger.LOGGER.debugf( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf(
"UserCollectionType impl does not implement ParameterizedType but parameters were present : `%s`", "UserCollectionType impl does not implement ParameterizedType but parameters were present : `%s`",
type.getClass().getName() type.getClass().getName()
); );

View File

@ -21,14 +21,15 @@ import org.jboss.logging.annotations.ValidIdRange;
@MessageLogger( projectCode = "HHH" ) @MessageLogger( projectCode = "HHH" )
@ValidIdRange( min = 90005701, max = 90005800 ) @ValidIdRange( min = 90005701, max = 90005800 )
@SubSystemLogging( @SubSystemLogging(
name = MappingModelCreationLogger.LOGGER_NAME, name = MappingModelCreationLogging.LOGGER_NAME,
description = "Logging related to building of Hibernate's runtime metamodel descriptors of the domain model" description = "Logging related to building of Hibernate's runtime metamodel descriptors of the domain model"
) )
public interface MappingModelCreationLogger extends BasicLogger { public interface MappingModelCreationLogging extends BasicLogger {
String LOGGER_NAME = SubSystemLogging.BASE + ".model.mapping.creation"; String LOGGER_NAME = SubSystemLogging.BASE + ".model.mapping.creation";
MappingModelCreationLogger LOGGER = Logger.getMessageLogger( MappingModelCreationLogger.class, LOGGER_NAME ); Logger MAPPING_MODEL_CREATION_LOGGER = Logger.getLogger( LOGGER_NAME );
MappingModelCreationLogging MAPPING_MODEL_CREATION_MESSAGE_LOGGER = Logger.getMessageLogger( MappingModelCreationLogging.class, LOGGER_NAME );
boolean TRACE_ENABLED = LOGGER.isTraceEnabled(); boolean MAPPING_MODEL_CREATION_TRACE_ENABLED = MAPPING_MODEL_CREATION_LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = LOGGER.isDebugEnabled(); boolean MAPPING_MODEL_CREATION_DEBUG_ENABLED = MAPPING_MODEL_CREATION_LOGGER.isDebugEnabled();
} }

View File

@ -20,7 +20,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.IndexedConsumer; import org.hibernate.internal.util.IndexedConsumer;
import org.hibernate.loader.ast.internal.CompoundNaturalIdLoader; import org.hibernate.loader.ast.internal.CompoundNaturalIdLoader;
import org.hibernate.loader.ast.internal.MultiNaturalIdLoaderStandard; import org.hibernate.loader.ast.internal.MultiNaturalIdLoaderInPredicate;
import org.hibernate.loader.ast.spi.MultiNaturalIdLoader; import org.hibernate.loader.ast.spi.MultiNaturalIdLoader;
import org.hibernate.loader.ast.spi.NaturalIdLoader; import org.hibernate.loader.ast.spi.NaturalIdLoader;
import org.hibernate.metamodel.UnsupportedMappingException; import org.hibernate.metamodel.UnsupportedMappingException;
@ -251,7 +251,7 @@ public class CompoundNaturalIdMapping extends AbstractNaturalIdMapping implement
@Override @Override
public MultiNaturalIdLoader<?> makeMultiLoader(EntityMappingType entityDescriptor) { public MultiNaturalIdLoader<?> makeMultiLoader(EntityMappingType entityDescriptor) {
return new MultiNaturalIdLoaderStandard<>( entityDescriptor ); return new MultiNaturalIdLoaderInPredicate<>( entityDescriptor );
} }
@Override @Override

View File

@ -62,7 +62,6 @@ import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.ForeignKeyDescriptor; import org.hibernate.metamodel.mapping.ForeignKeyDescriptor;
import org.hibernate.metamodel.mapping.JdbcMapping; import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.ManagedMappingType; import org.hibernate.metamodel.mapping.ManagedMappingType;
import org.hibernate.metamodel.mapping.MappingModelCreationLogger;
import org.hibernate.metamodel.mapping.ModelPart; import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.ModelPartContainer; import org.hibernate.metamodel.mapping.ModelPartContainer;
import org.hibernate.metamodel.mapping.PluralAttributeMapping; import org.hibernate.metamodel.mapping.PluralAttributeMapping;
@ -101,7 +100,8 @@ import org.hibernate.type.descriptor.java.MutabilityPlan;
import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry; import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration; import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.metamodel.mapping.MappingModelCreationLogger.LOGGER; import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_DEBUG_ENABLED;
import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_MESSAGE_LOGGER;
import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnReferenceKey; import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnReferenceKey;
/** /**
@ -208,7 +208,7 @@ public class MappingModelCreationHelper {
final FetchStyle fetchStyle; final FetchStyle fetchStyle;
if ( declaringType instanceof EmbeddableMappingType ) { if ( declaringType instanceof EmbeddableMappingType ) {
if ( bootProperty.isLazy() ) { if ( bootProperty.isLazy() ) {
LOGGER.debugf( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf(
"Attribute was declared lazy, but is part of an embeddable - `%s#%s` - LAZY will be ignored", "Attribute was declared lazy, but is part of an embeddable - `%s#%s` - LAZY will be ignored",
declaringType.getNavigableRole().getFullPath(), declaringType.getNavigableRole().getFullPath(),
bootProperty.getName() bootProperty.getName()
@ -656,24 +656,23 @@ public class MappingModelCreationHelper {
sessionFactory sessionFactory
); );
final PluralAttributeMappingImpl pluralAttributeMapping = mappingConverter final PluralAttributeMappingImpl pluralAttributeMapping = mappingConverter.apply( new PluralAttributeMappingImpl(
.apply( new PluralAttributeMappingImpl( attrName,
attrName, bootValueMapping,
bootValueMapping, propertyAccess,
propertyAccess, attributeMetadata,
attributeMetadata, collectionMappingType,
collectionMappingType, stateArrayPosition,
stateArrayPosition, fetchableIndex,
fetchableIndex, elementDescriptor,
elementDescriptor, indexDescriptor,
indexDescriptor, identifierDescriptor,
identifierDescriptor, timing,
timing, style,
style, cascadeStyle,
cascadeStyle, declaringType,
declaringType, collectionDescriptor
collectionDescriptor ) );
) );
creationProcess.registerInitializationCallback( creationProcess.registerInitializationCallback(
"PluralAttributeMapping(" + bootValueMapping.getRole() + ")#finishInitialization", "PluralAttributeMapping(" + bootValueMapping.getRole() + ")#finishInitialization",
@ -1695,8 +1694,8 @@ public class MappingModelCreationHelper {
|| value instanceof ManyToOne && value.isNullable() && ( (ManyToOne) value ).isIgnoreNotFound() ) { || value instanceof ManyToOne && value.isNullable() && ( (ManyToOne) value ).isIgnoreNotFound() ) {
fetchTiming = FetchTiming.IMMEDIATE; fetchTiming = FetchTiming.IMMEDIATE;
if ( lazy ) { if ( lazy ) {
if ( MappingModelCreationLogger.DEBUG_ENABLED ) { if ( MAPPING_MODEL_CREATION_DEBUG_ENABLED ) {
MappingModelCreationLogger.LOGGER.debugf( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf(
"Forcing FetchTiming.IMMEDIATE for to-one association : %s.%s", "Forcing FetchTiming.IMMEDIATE for to-one association : %s.%s",
declaringType.getNavigableRole(), declaringType.getNavigableRole(),
bootProperty.getName() bootProperty.getName()

View File

@ -13,7 +13,6 @@ import java.util.Map;
import java.util.function.Consumer; import java.util.function.Consumer;
import org.hibernate.metamodel.mapping.ForeignKeyDescriptor; import org.hibernate.metamodel.mapping.ForeignKeyDescriptor;
import org.hibernate.metamodel.mapping.MappingModelCreationLogger;
import org.hibernate.metamodel.mapping.ModelPart; import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.NonTransientException; import org.hibernate.metamodel.mapping.NonTransientException;
import org.hibernate.metamodel.model.domain.NavigableRole; import org.hibernate.metamodel.model.domain.NavigableRole;
@ -21,6 +20,9 @@ import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.query.sqm.function.SqmFunctionRegistry; import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_MESSAGE_LOGGER;
import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_TRACE_ENABLED;
/** /**
* @author Steve Ebersole * @author Steve Ebersole
*/ */
@ -41,11 +43,9 @@ public class MappingModelCreationProcess {
} }
private final Map<String,EntityPersister> entityPersisterMap; private final Map<String,EntityPersister> entityPersisterMap;
private final RuntimeModelCreationContext creationContext; private final RuntimeModelCreationContext creationContext;
private String currentlyProcessingRole; private String currentlyProcessingRole;
private List<PostInitCallbackEntry> postInitCallbacks; private List<PostInitCallbackEntry> postInitCallbacks;
private MappingModelCreationProcess( private MappingModelCreationProcess(
@ -89,7 +89,7 @@ public class MappingModelCreationProcess {
} }
private void executePostInitCallbacks() { private void executePostInitCallbacks() {
MappingModelCreationLogger.LOGGER.debugf( "Starting post-init callbacks" ); MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf( "Starting post-init callbacks" );
Map<PostInitCallbackEntry, Exception> exceptions = new HashMap<>(); Map<PostInitCallbackEntry, Exception> exceptions = new HashMap<>();
while ( postInitCallbacks != null && !postInitCallbacks.isEmpty() ) { while ( postInitCallbacks != null && !postInitCallbacks.isEmpty() ) {
@ -112,7 +112,7 @@ public class MappingModelCreationProcess {
} }
catch (Exception e) { catch (Exception e) {
if ( e instanceof NonTransientException ) { if ( e instanceof NonTransientException ) {
MappingModelCreationLogger.LOGGER.debugf( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf(
"Mapping-model creation encountered non-transient error : %s", "Mapping-model creation encountered non-transient error : %s",
e e
); );
@ -121,11 +121,11 @@ public class MappingModelCreationProcess {
exceptions.put( callbackEntry, e ); exceptions.put( callbackEntry, e );
final String format = "Mapping-model creation encountered (possibly) transient error : %s"; final String format = "Mapping-model creation encountered (possibly) transient error : %s";
if ( MappingModelCreationLogger.TRACE_ENABLED ) { if ( MAPPING_MODEL_CREATION_TRACE_ENABLED ) {
MappingModelCreationLogger.LOGGER.tracef( e, format, e ); MAPPING_MODEL_CREATION_MESSAGE_LOGGER.tracef( e, format, e );
} }
else { else {
MappingModelCreationLogger.LOGGER.debugf( format, e ); MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf( format, e );
} }
} }
} }
@ -239,7 +239,7 @@ public class MappingModelCreationProcess {
} }
private boolean process() { private boolean process() {
MappingModelCreationLogger.LOGGER.debugf( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf(
"Starting PostInitCallbackEntry : %s", "Starting PostInitCallbackEntry : %s",
description description
); );

View File

@ -13,6 +13,7 @@ import java.util.function.Supplier;
import org.hibernate.cache.MutableCacheKeyBuilder; import org.hibernate.cache.MutableCacheKeyBuilder;
import org.hibernate.engine.FetchStyle; import org.hibernate.engine.FetchStyle;
import org.hibernate.engine.FetchTiming; import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.profile.internal.FetchProfileAffectee;
import org.hibernate.engine.spi.CascadeStyle; import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.LoadQueryInfluencers; import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
@ -75,7 +76,7 @@ import org.jboss.logging.Logger;
*/ */
public class PluralAttributeMappingImpl public class PluralAttributeMappingImpl
extends AbstractAttributeMapping extends AbstractAttributeMapping
implements PluralAttributeMapping, FetchOptions { implements PluralAttributeMapping, FetchProfileAffectee, FetchOptions {
private static final Logger log = Logger.getLogger( PluralAttributeMappingImpl.class ); private static final Logger log = Logger.getLogger( PluralAttributeMappingImpl.class );
/** /**
@ -894,6 +895,13 @@ public class PluralAttributeMappingImpl
return getCollectionDescriptor().isAffectedByEntityGraph( influencers ); return getCollectionDescriptor().isAffectedByEntityGraph( influencers );
} }
@Override
public void registerAffectingFetchProfile(String fetchProfileName, org.hibernate.engine.profile.Fetch.Style fetchStyle) {
if ( collectionDescriptor instanceof FetchProfileAffectee ) {
( (FetchProfileAffectee) collectionDescriptor ).registerAffectingFetchProfile( fetchProfileName, fetchStyle );
}
}
@Override @Override
public boolean isAffectedByEnabledFetchProfiles(LoadQueryInfluencers influencers) { public boolean isAffectedByEnabledFetchProfiles(LoadQueryInfluencers influencers) {
return getCollectionDescriptor().isAffectedByEnabledFetchProfiles( influencers ); return getCollectionDescriptor().isAffectedByEnabledFetchProfiles( influencers );

View File

@ -15,18 +15,13 @@ import java.util.function.BiConsumer;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.cache.MutableCacheKeyBuilder; import org.hibernate.cache.MutableCacheKeyBuilder;
import org.hibernate.engine.spi.PersistenceContext; import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.IndexedConsumer; import org.hibernate.internal.util.IndexedConsumer;
import org.hibernate.loader.ast.internal.MultiNaturalIdLoaderStandard; import org.hibernate.loader.ast.internal.*;
import org.hibernate.loader.ast.internal.SimpleNaturalIdLoader;
import org.hibernate.loader.ast.spi.MultiNaturalIdLoader; import org.hibernate.loader.ast.spi.MultiNaturalIdLoader;
import org.hibernate.loader.ast.spi.NaturalIdLoader; import org.hibernate.loader.ast.spi.NaturalIdLoader;
import org.hibernate.metamodel.mapping.AttributeMapping; import org.hibernate.metamodel.mapping.*;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.metamodel.mapping.SelectableConsumer;
import org.hibernate.metamodel.mapping.SingularAttributeMapping;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.spi.NavigablePath; import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.spi.SqlSelection; import org.hibernate.sql.ast.spi.SqlSelection;
@ -36,11 +31,14 @@ import org.hibernate.sql.results.graph.DomainResultCreationState;
import org.hibernate.type.descriptor.java.JavaType; import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.spi.TypeConfiguration; import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.supportsSqlArrayType;
/** /**
* Single-attribute NaturalIdMapping implementation * Single-attribute NaturalIdMapping implementation
*/ */
public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements JavaType.CoercionContext { public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements JavaType.CoercionContext {
private final SingularAttributeMapping attribute; private final SingularAttributeMapping attribute;
private final SessionFactoryImplementor sessionFactory;
private final TypeConfiguration typeConfiguration; private final TypeConfiguration typeConfiguration;
public SimpleNaturalIdMapping( public SimpleNaturalIdMapping(
@ -53,7 +51,8 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
); );
this.attribute = attribute; this.attribute = attribute;
typeConfiguration = creationProcess.getCreationContext().getTypeConfiguration(); this.sessionFactory = creationProcess.getCreationContext().getSessionFactory();
this.typeConfiguration = creationProcess.getCreationContext().getTypeConfiguration();
} }
@ -62,7 +61,11 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
} }
@Override @Override
public void verifyFlushState(Object id, Object[] currentState, Object[] loadedState, SharedSessionContractImplementor session) { public void verifyFlushState(
Object id,
Object[] currentState,
Object[] loadedState,
SharedSessionContractImplementor session) {
if ( isMutable() ) { if ( isMutable() ) {
// EARLY EXIT!!! // EARLY EXIT!!!
// the natural id is mutable (!immutable), no need to do the checks // the natural id is mutable (!immutable), no need to do the checks
@ -77,7 +80,7 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
? persistenceContext.getNaturalIdSnapshot( id, persister ) ? persistenceContext.getNaturalIdSnapshot( id, persister )
: persister.getNaturalIdMapping().extractNaturalIdFromEntityState( loadedState ); : persister.getNaturalIdMapping().extractNaturalIdFromEntityState( loadedState );
if ( ! areEqual( naturalId, snapshot, session ) ) { if ( !areEqual( naturalId, snapshot, session ) ) {
throw new HibernateException( throw new HibernateException(
String.format( String.format(
"An immutable natural identifier of entity %s was altered from `%s` to `%s`", "An immutable natural identifier of entity %s was altered from `%s` to `%s`",
@ -99,7 +102,7 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
return state[0]; return state[0];
} }
return state[ attribute.getStateArrayPosition() ]; return state[attribute.getStateArrayPosition()];
} }
@Override @Override
@ -122,7 +125,7 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
} }
} }
if ( ! getJavaType().getJavaTypeClass().isInstance( naturalIdValue ) ) { if ( !getJavaType().getJavaTypeClass().isInstance( naturalIdValue ) ) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
String.format( String.format(
Locale.ROOT, Locale.ROOT,
@ -146,7 +149,7 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
return normalizeIncomingValue( incoming ); return normalizeIncomingValue( incoming );
} }
@SuppressWarnings( "rawtypes" ) @SuppressWarnings("rawtypes")
public Object normalizeIncomingValue(Object naturalIdToLoad) { public Object normalizeIncomingValue(Object naturalIdToLoad) {
final Object normalizedValue; final Object normalizedValue;
if ( naturalIdToLoad instanceof Map ) { if ( naturalIdToLoad instanceof Map ) {
@ -195,7 +198,10 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
} }
@Override @Override
public void applySqlSelections(NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState) { public void applySqlSelections(
NavigablePath navigablePath,
TableGroup tableGroup,
DomainResultCreationState creationState) {
attribute.applySqlSelections( navigablePath, tableGroup, creationState ); attribute.applySqlSelections( navigablePath, tableGroup, creationState );
} }
@ -288,7 +294,11 @@ public class SimpleNaturalIdMapping extends AbstractNaturalIdMapping implements
@Override @Override
public MultiNaturalIdLoader<?> makeMultiLoader(EntityMappingType entityDescriptor) { public MultiNaturalIdLoader<?> makeMultiLoader(EntityMappingType entityDescriptor) {
return new MultiNaturalIdLoaderStandard<>( entityDescriptor ); boolean supportsSqlArrayType = supportsSqlArrayType( sessionFactory.getFastSessionServices().jdbcServices.getDialect() );
if ( supportsSqlArrayType && attribute instanceof BasicAttributeMapping ) {
return new MultiNaturalIdLoaderArrayParam<>( entityDescriptor );
}
return new MultiNaturalIdLoaderInPredicate<>( entityDescriptor );
} }
@Override @Override
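The selection above is transparent to callers: a natural-id multi-load is still issued through the existing NaturalIdMultiLoadAccess API and simply lands on the array-parameter loader when the dialect supports standard ARRAY types and the natural-id is a single basic attribute. A sketch, with Book standing in for such an entity:

[source]
----
import java.util.List;
import org.hibernate.Session;

public class NaturalIdMultiLoadSketch {
	// Book is an illustrative entity whose natural-id is a single basic attribute (its ISBN).
	static List<Book> loadByIsbn(Session session) {
		return session.byMultipleNaturalId( Book.class )
				.enableOrderedReturn( true )
				.multiLoad( "978-0-13-468599-1", "978-1-932394-88-9" );
	}
}
----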

View File

@ -41,7 +41,7 @@ import org.hibernate.engine.jdbc.mutation.internal.MutationQueryOptions;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator; import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper; import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.profile.Fetch; import org.hibernate.engine.profile.Fetch;
import org.hibernate.engine.profile.FetchProfile; import org.hibernate.engine.profile.internal.FetchProfileAffectee;
import org.hibernate.engine.spi.EntityKey; import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle; import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;
import org.hibernate.engine.spi.LoadQueryInfluencers; import org.hibernate.engine.spi.LoadQueryInfluencers;
@ -58,11 +58,11 @@ import org.hibernate.internal.util.StringHelper;
import org.hibernate.jdbc.Expectation; import org.hibernate.jdbc.Expectation;
import org.hibernate.jdbc.Expectations; import org.hibernate.jdbc.Expectations;
import org.hibernate.loader.ast.internal.CollectionElementLoaderByIndex; import org.hibernate.loader.ast.internal.CollectionElementLoaderByIndex;
import org.hibernate.loader.ast.internal.CollectionLoaderBatchKey;
import org.hibernate.loader.ast.internal.CollectionLoaderNamedQuery; import org.hibernate.loader.ast.internal.CollectionLoaderNamedQuery;
import org.hibernate.loader.ast.internal.CollectionLoaderSingleKey; import org.hibernate.loader.ast.internal.CollectionLoaderSingleKey;
import org.hibernate.loader.ast.internal.CollectionLoaderSubSelectFetch; import org.hibernate.loader.ast.internal.CollectionLoaderSubSelectFetch;
import org.hibernate.loader.ast.internal.LoaderSqlAstCreationState; import org.hibernate.loader.ast.internal.LoaderSqlAstCreationState;
import org.hibernate.loader.ast.spi.BatchLoaderFactory;
import org.hibernate.loader.ast.spi.CollectionLoader; import org.hibernate.loader.ast.spi.CollectionLoader;
import org.hibernate.mapping.Collection; import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column; import org.hibernate.mapping.Column;
@ -145,7 +145,7 @@ import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER
*/ */
@Internal @Internal
public abstract class AbstractCollectionPersister public abstract class AbstractCollectionPersister
implements CollectionPersister, CollectionMutationTarget, PluralAttributeMappingImpl.Aware, DeprecatedCollectionStuff { implements CollectionPersister, CollectionMutationTarget, PluralAttributeMappingImpl.Aware, FetchProfileAffectee, DeprecatedCollectionStuff {
private final NavigableRole navigableRole; private final NavigableRole navigableRole;
private final CollectionSemantics<?,?> collectionSemantics; private final CollectionSemantics<?,?> collectionSemantics;
@ -232,6 +232,7 @@ public abstract class AbstractCollectionPersister
private CollectionElementLoaderByIndex collectionElementLoaderByIndex; private CollectionElementLoaderByIndex collectionElementLoaderByIndex;
private PluralAttributeMapping attributeMapping; private PluralAttributeMapping attributeMapping;
private volatile Map<String, Fetch.Style> affectingFetchProfiles;
@Deprecated(since = "6.0") @Deprecated(since = "6.0")
@ -738,13 +739,32 @@ public abstract class AbstractCollectionPersister
); );
} }
private CollectionLoader reusableCollectionLoader;
protected CollectionLoader createCollectionLoader(LoadQueryInfluencers loadQueryInfluencers) { protected CollectionLoader createCollectionLoader(LoadQueryInfluencers loadQueryInfluencers) {
final int batchSize = getBatchSize(); if ( canUseReusableCollectionLoader( loadQueryInfluencers ) ) {
if ( batchSize > 1 ) { if ( reusableCollectionLoader == null ) {
return new CollectionLoaderBatchKey( attributeMapping, batchSize, loadQueryInfluencers, getFactory() ); reusableCollectionLoader = generateCollectionLoader( LoadQueryInfluencers.NONE );
}
return reusableCollectionLoader;
} }
// create a one-off
return generateCollectionLoader( loadQueryInfluencers );
}
private boolean canUseReusableCollectionLoader(LoadQueryInfluencers loadQueryInfluencers) {
// we can reuse it so long as none of the enabled influencers affect it
return attributeMapping.isNotAffectedByInfluencers( loadQueryInfluencers );
}
private CollectionLoader generateCollectionLoader(LoadQueryInfluencers loadQueryInfluencers) {
final int batchSize = getBatchSize();
if ( batchSize > 1 ) {
return getFactory().getServiceRegistry()
.getService( BatchLoaderFactory.class )
.createCollectionBatchLoader( batchSize, loadQueryInfluencers, attributeMapping, getFactory() );
}
return new CollectionLoaderSingleKey( attributeMapping, loadQueryInfluencers, getFactory() ); return new CollectionLoaderSingleKey( attributeMapping, loadQueryInfluencers, getFactory() );
} }
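The reuse rule introduced here is simply "build the loader once without influencers and hand it out whenever the current influencers cannot affect the plan; otherwise build a one-off". A stripped-down sketch of that pattern, using generic stand-ins rather than the ORM types:

[source]
----
import java.util.function.Function;
import java.util.function.Predicate;

public class ReusableLoaderSketch<C, L> {
	private final Predicate<C> unaffectedBy;   // true when the context cannot change the plan
	private final Function<C, L> generator;    // builds a loader for a given context
	private final C emptyContext;              // the "no influencers" context
	private L reusable;

	public ReusableLoaderSketch(Predicate<C> unaffectedBy, Function<C, L> generator, C emptyContext) {
		this.unaffectedBy = unaffectedBy;
		this.generator = generator;
		this.emptyContext = emptyContext;
	}

	public L get(C currentContext) {
		if ( unaffectedBy.test( currentContext ) ) {
			if ( reusable == null ) {
				reusable = generator.apply( emptyContext );
			}
			return reusable;
		}
		// influencers are in play - create a one-off loader for this call
		return generator.apply( currentContext );
	}
}
----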
@ -1502,6 +1522,29 @@ public abstract class AbstractCollectionPersister
return attributeMapping; return attributeMapping;
} }
@Override
public void registerAffectingFetchProfile(String fetchProfileName, Fetch.Style fetchStyle) {
if ( affectingFetchProfiles == null ) {
affectingFetchProfiles = new HashMap<>();
}
affectingFetchProfiles.put( fetchProfileName, fetchStyle );
}
@Override
public boolean isAffectedByEnabledFetchProfiles(LoadQueryInfluencers influencers) {
if ( affectingFetchProfiles == null ) {
return false;
}
for ( Map.Entry<String, Fetch.Style> entry : affectingFetchProfiles.entrySet() ) {
if ( influencers.isFetchProfileEnabled( entry.getKey() ) ) {
return true;
}
}
return false;
}
@Override @Override
public boolean isAffectedByEnabledFilters(LoadQueryInfluencers influencers) { public boolean isAffectedByEnabledFilters(LoadQueryInfluencers influencers) {
if ( influencers.hasEnabledFilters() ) { if ( influencers.hasEnabledFilters() ) {
@ -1519,21 +1562,6 @@ public abstract class AbstractCollectionPersister
return false; return false;
} }
@Override
public boolean isAffectedByEnabledFetchProfiles(LoadQueryInfluencers influencers) {
if ( influencers.hasEnabledFetchProfiles() ) {
for ( String enabledFetchProfileName : influencers.getEnabledFetchProfileNames() ) {
final FetchProfile fetchProfile = getFactory().getFetchProfile( enabledFetchProfileName );
final Fetch fetch = fetchProfile.getFetchByRole( getRole() );
if ( fetch != null && fetch.getStyle() == Fetch.Style.JOIN ) {
return true;
}
}
}
return false;
}
@Override @Override
public CollectionSemantics<?,?> getCollectionSemantics() { public CollectionSemantics<?,?> getCollectionSemantics() {
return collectionSemantics; return collectionSemantics;

View File

@ -75,6 +75,7 @@ import org.hibernate.engine.internal.MutableEntityEntryFactory;
import org.hibernate.engine.internal.StatefulPersistenceContext; import org.hibernate.engine.internal.StatefulPersistenceContext;
import org.hibernate.engine.jdbc.mutation.spi.MutationExecutorService; import org.hibernate.engine.jdbc.mutation.spi.MutationExecutorService;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator; import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.CachedNaturalIdValueSource; import org.hibernate.engine.spi.CachedNaturalIdValueSource;
import org.hibernate.engine.spi.CascadeStyle; import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.CollectionKey; import org.hibernate.engine.spi.CollectionKey;
@ -116,16 +117,19 @@ import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.collections.LockModeEnumMap; import org.hibernate.internal.util.collections.LockModeEnumMap;
import org.hibernate.jdbc.Expectation; import org.hibernate.jdbc.Expectation;
import org.hibernate.jdbc.TooManyRowsAffectedException; import org.hibernate.jdbc.TooManyRowsAffectedException;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;
import org.hibernate.loader.ast.internal.CacheEntityLoaderHelper; import org.hibernate.loader.ast.internal.CacheEntityLoaderHelper;
import org.hibernate.engine.profile.internal.FetchProfileAffectee;
import org.hibernate.loader.ast.internal.LoaderSelectBuilder; import org.hibernate.loader.ast.internal.LoaderSelectBuilder;
import org.hibernate.loader.ast.internal.LoaderSqlAstCreationState; import org.hibernate.loader.ast.internal.LoaderSqlAstCreationState;
import org.hibernate.loader.ast.internal.MultiIdLoaderStandard; import org.hibernate.loader.ast.internal.MultiIdEntityLoaderArrayParam;
import org.hibernate.loader.ast.internal.MultiIdEntityLoaderStandard;
import org.hibernate.loader.ast.internal.Preparable; import org.hibernate.loader.ast.internal.Preparable;
import org.hibernate.loader.ast.internal.SingleIdArrayLoadPlan; import org.hibernate.loader.ast.internal.SingleIdArrayLoadPlan;
import org.hibernate.loader.ast.internal.SingleIdEntityLoaderDynamicBatch;
import org.hibernate.loader.ast.internal.SingleIdEntityLoaderProvidedQueryImpl; import org.hibernate.loader.ast.internal.SingleIdEntityLoaderProvidedQueryImpl;
import org.hibernate.loader.ast.internal.SingleIdEntityLoaderStandardImpl; import org.hibernate.loader.ast.internal.SingleIdEntityLoaderStandardImpl;
import org.hibernate.loader.ast.internal.SingleUniqueKeyEntityLoaderStandard; import org.hibernate.loader.ast.internal.SingleUniqueKeyEntityLoaderStandard;
import org.hibernate.loader.ast.spi.BatchLoaderFactory;
import org.hibernate.loader.ast.spi.Loader; import org.hibernate.loader.ast.spi.Loader;
import org.hibernate.loader.ast.spi.MultiIdEntityLoader; import org.hibernate.loader.ast.spi.MultiIdEntityLoader;
import org.hibernate.loader.ast.spi.MultiIdLoadOptions; import org.hibernate.loader.ast.spi.MultiIdLoadOptions;
@ -309,7 +313,7 @@ import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnRefere
*/ */
@Internal @Internal
public abstract class AbstractEntityPersister public abstract class AbstractEntityPersister
implements InFlightEntityMappingType, EntityMutationTarget, LazyPropertyInitializer, PostInsertIdentityPersister, DeprecatedEntityStuff { implements InFlightEntityMappingType, EntityMutationTarget, LazyPropertyInitializer, PostInsertIdentityPersister, FetchProfileAffectee, DeprecatedEntityStuff {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( AbstractEntityPersister.class ); private static final CoreMessageLogger LOG = CoreLogging.messageLogger( AbstractEntityPersister.class );
@ -322,8 +326,8 @@ public abstract class AbstractEntityPersister
private final String sqlAliasStem; private final String sqlAliasStem;
private final SingleIdEntityLoader<?> singleIdEntityLoader; private final SingleIdEntityLoader<?> singleIdLoader;
private final MultiIdEntityLoader<?> multiIdEntityLoader; private final MultiIdEntityLoader<?> multiIdLoader;
private NaturalIdLoader<?> naturalIdLoader; private NaturalIdLoader<?> naturalIdLoader;
private MultiNaturalIdLoader<?> multiNaturalIdLoader; private MultiNaturalIdLoader<?> multiNaturalIdLoader;
@ -529,16 +533,16 @@ public abstract class AbstractEntityPersister
throw new IllegalArgumentException( "Could not resolve named load-query [" + getEntityName() throw new IllegalArgumentException( "Could not resolve named load-query [" + getEntityName()
+ "] : " + persistentClass.getLoaderName() ); + "] : " + persistentClass.getLoaderName() );
} }
singleIdEntityLoader = new SingleIdEntityLoaderProvidedQueryImpl<>( this, namedQueryMemento ); singleIdLoader = new SingleIdEntityLoaderProvidedQueryImpl<>( this, namedQueryMemento );
} }
else if ( batchSize > 1 ) { else if ( batchSize > 1 ) {
singleIdEntityLoader = createBatchingIdEntityLoader( this, batchSize, factory ); singleIdLoader = createBatchingIdEntityLoader( this, batchSize, factory );
} }
else { else {
singleIdEntityLoader = new SingleIdEntityLoaderStandardImpl<>( this, factory ); singleIdLoader = new SingleIdEntityLoaderStandardImpl<>( this, factory );
} }
multiIdEntityLoader = new MultiIdLoaderStandard<>( this, persistentClass, factory ); multiIdLoader = buildMultiIdLoader( persistentClass );
final TypeConfiguration typeConfiguration = creationContext.getTypeConfiguration(); final TypeConfiguration typeConfiguration = creationContext.getTypeConfiguration();
final SqmFunctionRegistry functionRegistry = creationContext.getFunctionRegistry(); final SqmFunctionRegistry functionRegistry = creationContext.getFunctionRegistry();
@ -801,6 +805,14 @@ public abstract class AbstractEntityPersister
fullDiscriminatorValues = toObjectArray( values ); fullDiscriminatorValues = toObjectArray( values );
} }
private MultiIdEntityLoader<Object> buildMultiIdLoader(PersistentClass persistentClass) {
if ( persistentClass.getIdentifier() instanceof BasicValue
&& MultiKeyLoadHelper.supportsSqlArrayType( factory.getServiceRegistry().getService( JdbcServices.class ).getDialect() ) ) {
return new MultiIdEntityLoaderArrayParam<>( this, factory );
}
return new MultiIdEntityLoaderStandard<>( this, persistentClass, factory );
}
private String getIdentitySelectString(Dialect dialect) { private String getIdentitySelectString(Dialect dialect) {
try { try {
return dialect.getIdentityColumnSupport() return dialect.getIdentityColumnSupport()
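At the API level nothing changes for callers: a multi-id load is requested the same way and simply renders as a single ARRAY parameter when the identifier is a basic value and the dialect supports it. A sketch using the existing MultiIdentifierLoadAccess API (Person and the ids are illustrative):

[source]
----
import java.util.List;
import org.hibernate.Session;

public class MultiIdLoadSketch {
	// Person is an illustrative entity with a basic, single-column identifier -
	// the case the new array-parameter loader targets.
	static List<Person> loadSome(Session session) {
		return session.byMultipleIds( Person.class )
				.withBatchSize( 128 )   // optional cap on keys per SQL statement
				.multiLoad( 1L, 2L, 3L, 4L );
	}
}
----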
@ -1006,13 +1018,15 @@ public abstract class AbstractEntityPersister
private static SingleIdEntityLoader<?> createBatchingIdEntityLoader( private static SingleIdEntityLoader<?> createBatchingIdEntityLoader(
EntityMappingType entityDescriptor, EntityMappingType entityDescriptor,
int batchSize, int domainBatchSize,
SessionFactoryImplementor factory) { SessionFactoryImplementor factory) {
return new SingleIdEntityLoaderDynamicBatch<>( entityDescriptor, batchSize, factory ); return factory.getServiceRegistry()
.getService( BatchLoaderFactory.class )
.createEntityBatchLoader( domainBatchSize, entityDescriptor, factory );
} }
/** /**
* We might need to use cache invalidation is we have formulas, * We might need to use cache invalidation if we have formulas,
* dynamic update, or secondary tables. * dynamic update, or secondary tables.
* *
* @see #isCacheInvalidationRequired() * @see #isCacheInvalidationRequired()
@ -1837,7 +1851,7 @@ public abstract class AbstractEntityPersister
@Override @Override
public Object[] getDatabaseSnapshot(Object id, SharedSessionContractImplementor session) throws HibernateException { public Object[] getDatabaseSnapshot(Object id, SharedSessionContractImplementor session) throws HibernateException {
return singleIdEntityLoader.loadDatabaseSnapshot( id, session ); return singleIdLoader.loadDatabaseSnapshot( id, session );
} }
@Override @Override
@ -3323,8 +3337,8 @@ public abstract class AbstractEntityPersister
@Override @Override
public final void postInstantiate() throws MappingException { public final void postInstantiate() throws MappingException {
doLateInit(); doLateInit();
prepareLoader( singleIdEntityLoader ); prepareLoader( singleIdLoader );
prepareLoader( multiIdEntityLoader ); prepareLoader( multiIdLoader );
} }
private void prepareLoader(Loader loader) { private void prepareLoader(Loader loader) {
@ -3364,15 +3378,15 @@ public abstract class AbstractEntityPersister
} }
if ( optionalObject == null ) { if ( optionalObject == null ) {
return singleIdEntityLoader.load( id, lockOptions, readOnly, session ); return singleIdLoader.load( id, lockOptions, readOnly, session );
} }
else { else {
return singleIdEntityLoader.load( id, optionalObject, lockOptions, readOnly, session ); return singleIdLoader.load( id, optionalObject, lockOptions, readOnly, session );
} }
} }
public SingleIdEntityLoader<?> getSingleIdEntityLoader() { public SingleIdEntityLoader<?> getSingleIdLoader() {
return singleIdEntityLoader; return singleIdLoader;
} }
@Override @Override
@ -3394,7 +3408,7 @@ public abstract class AbstractEntityPersister
loaded = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache( loadEvent, this, entityKey ); loaded = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache( loadEvent, this, entityKey );
} }
if ( loaded == null ) { if ( loaded == null ) {
loaded = singleIdEntityLoader.load( identifier, entity, LockOptions.NONE, session ); loaded = singleIdLoader.load( identifier, entity, LockOptions.NONE, session );
} }
if ( loaded == null ) { if ( loaded == null ) {
@ -3426,7 +3440,7 @@ public abstract class AbstractEntityPersister
@Override @Override
public List<?> multiLoad(Object[] ids, EventSource session, MultiIdLoadOptions loadOptions) { public List<?> multiLoad(Object[] ids, EventSource session, MultiIdLoadOptions loadOptions) {
return multiIdEntityLoader.load( ids, loadOptions, session ); return multiIdLoader.load( ids, loadOptions, session );
} }
@Override @Override
@ -3437,6 +3451,11 @@ public abstract class AbstractEntityPersister
affectingFetchProfileNames.add( fetchProfileName ); affectingFetchProfileNames.add( fetchProfileName );
} }
@Override
public void registerAffectingFetchProfile(String fetchProfileName, org.hibernate.engine.profile.Fetch.Style fetchStyle) {
registerAffectingFetchProfile( fetchProfileName );
}
@Override @Override
public boolean isAffectedByEntityGraph(LoadQueryInfluencers loadQueryInfluencers) { public boolean isAffectedByEntityGraph(LoadQueryInfluencers loadQueryInfluencers) {
final RootGraphImplementor<?> graph = loadQueryInfluencers.getEffectiveEntityGraph().getGraph(); final RootGraphImplementor<?> graph = loadQueryInfluencers.getEffectiveEntityGraph().getGraph();

View File

@ -32,6 +32,7 @@ import org.hibernate.engine.transaction.jta.platform.internal.JtaPlatformInitiat
import org.hibernate.engine.transaction.jta.platform.internal.JtaPlatformResolverInitiator; import org.hibernate.engine.transaction.jta.platform.internal.JtaPlatformResolverInitiator;
import org.hibernate.event.internal.EntityCopyObserverFactoryInitiator; import org.hibernate.event.internal.EntityCopyObserverFactoryInitiator;
import org.hibernate.id.factory.internal.StandardIdentifierGeneratorFactoryInitiator; import org.hibernate.id.factory.internal.StandardIdentifierGeneratorFactoryInitiator;
import org.hibernate.loader.ast.internal.BatchLoaderFactoryInitiator;
import org.hibernate.persister.internal.PersisterClassResolverInitiator; import org.hibernate.persister.internal.PersisterClassResolverInitiator;
import org.hibernate.persister.internal.PersisterFactoryInitiator; import org.hibernate.persister.internal.PersisterFactoryInitiator;
import org.hibernate.property.access.internal.PropertyAccessStrategyResolverInitiator; import org.hibernate.property.access.internal.PropertyAccessStrategyResolverInitiator;
@ -158,6 +159,7 @@ public final class StandardServiceInitiators {
// ParameterMarkerStrategy // ParameterMarkerStrategy
serviceInitiators.add( ParameterMarkerStrategyInitiator.INSTANCE ); serviceInitiators.add( ParameterMarkerStrategyInitiator.INSTANCE );
serviceInitiators.add( BatchLoaderFactoryInitiator.INSTANCE );
serviceInitiators.trimToSize(); serviceInitiators.trimToSize();
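The newly registered BatchLoaderFactory is what ultimately services @BatchSize-driven fetching; the mapping side is unchanged. For reference, a minimal sketch of an entity whose proxies are resolved in batches through this service (the entity is illustrative):

[source]
----
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.annotations.BatchSize;

// When several uninitialized Author proxies are pending, up to 32 identifiers are
// fetched in one statement - via an ARRAY parameter or an IN list, depending on the dialect.
@Entity
@BatchSize(size = 32)
public class Author {
	@Id
	private Long id;

	private String name;
}
----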

View File

@ -54,6 +54,7 @@ import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.ExistsPredicate; import org.hibernate.sql.ast.tree.predicate.ExistsPredicate;
import org.hibernate.sql.ast.tree.predicate.FilterPredicate; import org.hibernate.sql.ast.tree.predicate.FilterPredicate;
import org.hibernate.sql.ast.tree.predicate.GroupedPredicate; import org.hibernate.sql.ast.tree.predicate.GroupedPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.InListPredicate; import org.hibernate.sql.ast.tree.predicate.InListPredicate;
import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate; import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate;
import org.hibernate.sql.ast.tree.predicate.Junction; import org.hibernate.sql.ast.tree.predicate.Junction;
@ -188,6 +189,8 @@ public interface SqlAstWalker {
void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate); void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate);
void visitInArrayPredicate(InArrayPredicate inArrayPredicate);
void visitExistsPredicate(ExistsPredicate existsPredicate); void visitExistsPredicate(ExistsPredicate existsPredicate);
void visitJunction(Junction junction); void visitJunction(Junction junction);

View File

@ -153,6 +153,7 @@ import org.hibernate.sql.ast.tree.predicate.FilterPredicate;
import org.hibernate.sql.ast.tree.predicate.FilterPredicate.FilterFragmentParameter; import org.hibernate.sql.ast.tree.predicate.FilterPredicate.FilterFragmentParameter;
import org.hibernate.sql.ast.tree.predicate.FilterPredicate.FilterFragmentPredicate; import org.hibernate.sql.ast.tree.predicate.FilterPredicate.FilterFragmentPredicate;
import org.hibernate.sql.ast.tree.predicate.GroupedPredicate; import org.hibernate.sql.ast.tree.predicate.GroupedPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.InListPredicate; import org.hibernate.sql.ast.tree.predicate.InListPredicate;
import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate; import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate;
import org.hibernate.sql.ast.tree.predicate.Junction; import org.hibernate.sql.ast.tree.predicate.Junction;
@ -6876,6 +6877,15 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
appendSql( CLOSE_PARENTHESIS ); appendSql( CLOSE_PARENTHESIS );
} }
@Override
public void visitInArrayPredicate(InArrayPredicate inArrayPredicate) {
sqlBuffer.append( "array_contains(" );
inArrayPredicate.getArrayParameter().accept( this );
sqlBuffer.append( "," );
inArrayPredicate.getTestExpression().accept( this );
sqlBuffer.append( ')' );
}
@Override @Override
public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) { public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) {
final SqlTuple lhsTuple; final SqlTuple lhsTuple;

View File

@ -61,6 +61,7 @@ import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.ExistsPredicate; import org.hibernate.sql.ast.tree.predicate.ExistsPredicate;
import org.hibernate.sql.ast.tree.predicate.FilterPredicate; import org.hibernate.sql.ast.tree.predicate.FilterPredicate;
import org.hibernate.sql.ast.tree.predicate.GroupedPredicate; import org.hibernate.sql.ast.tree.predicate.GroupedPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.InListPredicate; import org.hibernate.sql.ast.tree.predicate.InListPredicate;
import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate; import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate;
import org.hibernate.sql.ast.tree.predicate.Junction; import org.hibernate.sql.ast.tree.predicate.Junction;
@ -380,6 +381,12 @@ public class AbstractSqlAstWalker implements SqlAstWalker {
} }
} }
@Override
public void visitInArrayPredicate(InArrayPredicate predicate) {
predicate.getTestExpression().accept( this );
predicate.getArrayParameter().accept( this );
}
@Override @Override
public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) { public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) {
inSubQueryPredicate.getTestExpression().accept( this ); inSubQueryPredicate.getTestExpression().accept( this );

View File

@ -45,6 +45,7 @@ import org.hibernate.sql.ast.tree.from.ValuesTableReference;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement; import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
import org.hibernate.sql.ast.tree.predicate.ExistsPredicate; import org.hibernate.sql.ast.tree.predicate.ExistsPredicate;
import org.hibernate.sql.ast.tree.predicate.FilterPredicate; import org.hibernate.sql.ast.tree.predicate.FilterPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.InListPredicate; import org.hibernate.sql.ast.tree.predicate.InListPredicate;
import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate; import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate;
import org.hibernate.sql.ast.tree.select.QueryGroup; import org.hibernate.sql.ast.tree.select.QueryGroup;
@ -200,6 +201,10 @@ public class AggregateFunctionChecker extends AbstractSqlAstWalker {
public void visitInListPredicate(InListPredicate inListPredicate) { public void visitInListPredicate(InListPredicate inListPredicate) {
} }
@Override
public void visitInArrayPredicate(InArrayPredicate predicate) {
}
@Override @Override
public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) { public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) {
} }

View File

@ -59,6 +59,7 @@ import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.ExistsPredicate; import org.hibernate.sql.ast.tree.predicate.ExistsPredicate;
import org.hibernate.sql.ast.tree.predicate.FilterPredicate; import org.hibernate.sql.ast.tree.predicate.FilterPredicate;
import org.hibernate.sql.ast.tree.predicate.GroupedPredicate; import org.hibernate.sql.ast.tree.predicate.GroupedPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.InListPredicate; import org.hibernate.sql.ast.tree.predicate.InListPredicate;
import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate; import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate;
import org.hibernate.sql.ast.tree.predicate.Junction; import org.hibernate.sql.ast.tree.predicate.Junction;
@ -325,6 +326,12 @@ public class ExpressionReplacementWalker implements SqlAstWalker {
} }
} }
@Override
public void visitInArrayPredicate(InArrayPredicate inArrayPredicate) {
final Expression replacedTestExpression = replaceExpression( inArrayPredicate.getTestExpression() );
returnedNode = new InArrayPredicate( replacedTestExpression, inArrayPredicate.getArrayParameter() );
}
@Override @Override
public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) { public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) {
final Expression testExpression = replaceExpression( inSubQueryPredicate.getTestExpression() ); final Expression testExpression = replaceExpression( inSubQueryPredicate.getTestExpression() );

View File

@ -0,0 +1,43 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.sql.ast.tree.predicate;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.sql.ast.SqlAstWalker;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
/**
* @author Steve Ebersole
*/
public class InArrayPredicate extends AbstractPredicate {
private final Expression testExpression;
private final JdbcParameter arrayParameter;
public InArrayPredicate(Expression testExpression, JdbcParameter arrayParameter, JdbcMappingContainer expressionType) {
super( expressionType );
this.testExpression = testExpression;
this.arrayParameter = arrayParameter;
}
public InArrayPredicate(Expression testExpression, JdbcParameter arrayParameter) {
this( testExpression, arrayParameter, null );
}
public Expression getTestExpression() {
return testExpression;
}
public JdbcParameter getArrayParameter() {
return arrayParameter;
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitInArrayPredicate( this );
}
}

View File

@ -12,7 +12,7 @@ import java.util.List;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.AfterLoadAction; import org.hibernate.loader.ast.spi.AfterLoadAction;
import org.hibernate.persister.entity.Loadable; import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.sql.exec.spi.Callback; import org.hibernate.sql.exec.spi.Callback;
/** /**
@ -32,9 +32,9 @@ public class CallbackImpl implements Callback {
} }
@Override @Override
public void invokeAfterLoadActions(SharedSessionContractImplementor session, Object entity, Loadable persister) { public void invokeAfterLoadActions(Object entity, EntityMappingType entityMappingType, SharedSessionContractImplementor session) {
for ( int i = 0; i < afterLoadActions.size(); i++ ) { for ( int i = 0; i < afterLoadActions.size(); i++ ) {
afterLoadActions.get( i ).afterLoad( session, entity, persister ); afterLoadActions.get( i ).afterLoad( entity, entityMappingType, session );
} }
} }
} }

View File

@ -8,7 +8,7 @@ package org.hibernate.sql.exec.internal;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.AfterLoadAction; import org.hibernate.loader.ast.spi.AfterLoadAction;
import org.hibernate.persister.entity.Loadable; import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.sql.exec.spi.Callback; import org.hibernate.sql.exec.spi.Callback;
/** /**
@ -26,7 +26,7 @@ public class CallbackNoOp implements Callback {
} }
@Override @Override
public void invokeAfterLoadActions(SharedSessionContractImplementor session, Object entity, Loadable persister) { public void invokeAfterLoadActions(Object entity, EntityMappingType entityMappingType, SharedSessionContractImplementor session) {
// don't do anything // don't do anything
} }
} }

View File

@ -55,7 +55,7 @@ public class JdbcParameterBindingsImpl implements JdbcParameterBindings {
final Dialect dialect = factory.getJdbcServices().getDialect(); final Dialect dialect = factory.getJdbcServices().getDialect();
final boolean paddingEnabled = factory.getSessionFactoryOptions().inClauseParameterPaddingEnabled(); final boolean paddingEnabled = factory.getSessionFactoryOptions().inClauseParameterPaddingEnabled();
final int inExprLimit = dialect.getInExpressionCountLimit(); final int inExprLimit = dialect.getParameterCountLimit();
for ( ParameterOccurrence occurrence : parameterOccurrences ) { for ( ParameterOccurrence occurrence : parameterOccurrences ) {
final QueryParameterImplementor<?> param = occurrence.getParameter(); final QueryParameterImplementor<?> param = occurrence.getParameter();

View File

@ -4,12 +4,11 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later * License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/ */
package org.hibernate.sql.exec.spi; package org.hibernate.sql.exec.spi;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.AfterLoadAction; import org.hibernate.loader.ast.spi.AfterLoadAction;
import org.hibernate.persister.entity.Loadable; import org.hibernate.metamodel.mapping.EntityMappingType;
/** /**
* Callback to allow SQM interpretation to trigger certain things within ORM. See the current * Callback to allow SQM interpretation to trigger certain things within ORM. See the current
@ -19,7 +18,22 @@ import org.hibernate.persister.entity.Loadable;
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public interface Callback { public interface Callback {
/**
* Register a callback action
*/
void registerAfterLoadAction(AfterLoadAction afterLoadAction); void registerAfterLoadAction(AfterLoadAction afterLoadAction);
void invokeAfterLoadActions(SharedSessionContractImplementor session, Object entity, Loadable persister); /**
* Invoke all {@linkplain #registerAfterLoadAction registered} actions
*/
void invokeAfterLoadActions(Object entity, EntityMappingType entityMappingType, SharedSessionContractImplementor session);
/**
* @deprecated Use the {@linkplain #invokeAfterLoadActions(Object, EntityMappingType, SharedSessionContractImplementor) updated signature}
*/
@SuppressWarnings("removal")
@Deprecated(since = "6", forRemoval = true)
default void invokeAfterLoadActions(SharedSessionContractImplementor session, Object entity, org.hibernate.persister.entity.Loadable persister) {
invokeAfterLoadActions( entity, persister, session );
}
} }
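For SPI consumers, registering an action against the updated signature follows the same pattern as the lock callback adjusted later in this commit; a sketch (the lock options are illustrative):

[source]
----
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.sql.exec.spi.Callback;

public class AfterLoadActionSketch {
	// Registers an AfterLoadAction using the new (entity, entityMappingType, session) ordering.
	static void registerPessimisticLock(Callback callback) {
		final LockOptions lockOptions = new LockOptions( LockMode.PESSIMISTIC_WRITE );
		callback.registerAfterLoadAction( (entity, entityMappingType, session) ->
				session.asSessionImplementor().lock( entityMappingType.getEntityName(), entity, lockOptions )
		);
	}
}
----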

View File

@ -107,13 +107,12 @@ public class DeferredResultSetAccess extends AbstractResultSetAccess {
lockOptionsToUse.setTimeOut( lockOptions.getTimeOut() ); lockOptionsToUse.setTimeOut( lockOptions.getTimeOut() );
lockOptionsToUse.setScope( lockOptions.getScope() ); lockOptionsToUse.setScope( lockOptions.getScope() );
executionContext.getCallback().registerAfterLoadAction( executionContext.getCallback().registerAfterLoadAction( (entity, persister, session) ->
(session, entity, persister) -> session.asSessionImplementor().lock(
session.asSessionImplementor().lock( persister.getEntityName(),
persister.getEntityName(), entity,
entity, lockOptionsToUse
lockOptionsToUse )
)
); );
} }
} }

View File

@ -21,19 +21,16 @@ import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.PostLoadEvent; import org.hibernate.event.spi.PostLoadEvent;
import org.hibernate.event.spi.PostLoadEventListener; import org.hibernate.event.spi.PostLoadEventListener;
import org.hibernate.event.spi.PreLoadEvent; import org.hibernate.event.spi.PreLoadEvent;
import org.hibernate.persister.entity.Loadable;
import org.hibernate.sql.exec.spi.Callback;
import org.hibernate.sql.results.graph.Initializer;
import org.hibernate.sql.results.graph.collection.internal.ArrayInitializer;
import org.hibernate.query.spi.QueryOptions; import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.exec.spi.Callback;
import org.hibernate.sql.exec.spi.ExecutionContext; import org.hibernate.sql.exec.spi.ExecutionContext;
import org.hibernate.sql.results.graph.Initializer;
import org.hibernate.sql.results.graph.collection.CollectionInitializer; import org.hibernate.sql.results.graph.collection.CollectionInitializer;
import org.hibernate.sql.results.graph.collection.LoadingCollectionEntry;
import org.hibernate.sql.results.graph.collection.internal.ArrayInitializer;
import org.hibernate.sql.results.graph.entity.LoadingEntityEntry;
import org.hibernate.sql.results.jdbc.spi.JdbcValuesSourceProcessingOptions; import org.hibernate.sql.results.jdbc.spi.JdbcValuesSourceProcessingOptions;
import org.hibernate.sql.results.jdbc.spi.JdbcValuesSourceProcessingState; import org.hibernate.sql.results.jdbc.spi.JdbcValuesSourceProcessingState;
import org.hibernate.sql.results.graph.collection.LoadingCollectionEntry;
import org.hibernate.sql.results.graph.entity.LoadingEntityEntry;
import org.jboss.logging.Logger;
/** /**
* @author Steve Ebersole * @author Steve Ebersole
@ -188,9 +185,9 @@ public class JdbcValuesSourceProcessingStateStandardImpl implements JdbcValuesSo
final Callback callback = executionContext.getCallback(); final Callback callback = executionContext.getCallback();
if ( callback != null ) { if ( callback != null ) {
callback.invokeAfterLoadActions( callback.invokeAfterLoadActions(
getSession(),
loadingEntityEntry.getEntityInstance(), loadingEntityEntry.getEntityInstance(),
(Loadable) loadingEntityEntry.getDescriptor() loadingEntityEntry.getDescriptor(),
getSession()
); );
} }
} }

View File

@ -9,6 +9,7 @@ import java.util.Set;
import org.hibernate.Hibernate; import org.hibernate.Hibernate;
import org.hibernate.annotations.BatchSize; import org.hibernate.annotations.BatchSize;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.jdbc.SQLStatementInspector; import org.hibernate.testing.jdbc.SQLStatementInspector;
@ -30,6 +31,7 @@ import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne; import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany; import jakarta.persistence.OneToMany;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import org.assertj.core.api.Assertions;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
@@ -100,14 +102,20 @@ public class BatchAndClassIdAndLazyCollectionTest {
 					c.getParent().getName();
 				}
 				statementInspector.assertExecutedCount( 2 );
-				assertThat( statementInspector.getSqlQueries()
-						.get( 0 )
-						.toLowerCase( Locale.ROOT )
-						.contains( "in(?,?,?,?,?)" ) ).isTrue();
-				assertThat( statementInspector.getSqlQueries()
-						.get( 1 )
-						.toLowerCase( Locale.ROOT )
-						.contains( "in(?,?,?,?,?)" ) ).isTrue();
+
+				if ( scope.getSessionFactory().getJdbcServices().getDialect().supportsStandardArrays() ) {
+					Assertions.assertThat( statementInspector.getSqlQueries().get( 0 ) ).containsOnlyOnce( "?" );
+					Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "?" );
+				}
+				else {
+					final int batchSize = scope.getSessionFactory()
+							.getJdbcServices()
+							.getDialect()
+							.getBatchLoadSizingStrategy()
+							.determineOptimalBatchLoadSize( 1, 5, false );
+					Assertions.assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 0 ), '?' ) ).isEqualTo( batchSize );
+					Assertions.assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 1 ), '?' ) ).isEqualTo( batchSize );
+				}
 			}
 		);
 	}

View File

@ -29,6 +29,7 @@ import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne; import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany; import jakarta.persistence.OneToMany;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import org.assertj.core.api.Assertions;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
@@ -94,14 +95,15 @@ public class BatchAndClassIdTest {
 					statementInspector.clear();
 					List<Child> children = session.createQuery( "select c from Child c", Child.class ).getResultList();
 					statementInspector.assertExecutedCount( 3 );
-					assertThat( statementInspector.getSqlQueries()
-							.get( 1 )
-							.toLowerCase( Locale.ROOT )
-							.contains( "in(?,?,?,?,?)" ) ).isTrue();
-					assertThat( statementInspector.getSqlQueries()
-							.get( 2 )
-							.toLowerCase( Locale.ROOT )
-							.contains( "in(?,?,?,?,?)" ) ).isTrue();
+					if ( scope.getSessionFactory().getJdbcServices().getDialect().supportsStandardArrays() ) {
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "?" );
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "?" );
+					}
+					else {
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+					}
 					statementInspector.clear();
 					for ( Child c : children ) {
 						c.getParent().getName();

View File

@ -30,6 +30,7 @@ import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne; import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany; import jakarta.persistence.OneToMany;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import org.assertj.core.api.Assertions;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
@@ -94,14 +95,15 @@ public class BatchAndEmbeddedIdId2Test {
 					statementInspector.clear();
 					List<Child> children = session.createQuery( "select c from Child c", Child.class ).getResultList();
 					statementInspector.assertExecutedCount( 3 );
-					assertThat( statementInspector.getSqlQueries()
-							.get( 1 )
-							.toLowerCase( Locale.ROOT )
-							.contains( "in(?,?,?,?,?)" ) ).isTrue();
-					assertThat( statementInspector.getSqlQueries()
-							.get( 2 )
-							.toLowerCase( Locale.ROOT )
-							.contains( "in(?,?,?,?,?)" ) ).isTrue();
+					if ( scope.getSessionFactory().getJdbcServices().getDialect().supportsStandardArrays() ) {
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "?" );
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "?" );
+					}
+					else {
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+						Assertions.assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+					}
 					statementInspector.clear();
 					for ( Child c : children ) {
 						c.getParent().getName();

View File

@ -32,6 +32,7 @@ import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne; import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany; import jakarta.persistence.OneToMany;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import org.assertj.core.api.Assertions;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
@@ -101,14 +102,15 @@ public class BatchAndEmbeddedIdIdAndLazyCollectionTest {
 					c.getParent().getName();
 				}
 				statementInspector.assertExecutedCount( 2 );
-				assertThat( statementInspector.getSqlQueries()
-						.get( 0 )
-						.toLowerCase( Locale.ROOT )
-						.contains( "in(?,?,?,?,?)" ) ).isTrue();
-				assertThat( statementInspector.getSqlQueries()
-						.get( 1 )
-						.toLowerCase( Locale.ROOT )
-						.contains( "in(?,?,?,?,?)" ) ).isTrue();
+
+				if ( scope.getSessionFactory().getJdbcServices().getDialect().supportsStandardArrays() ) {
+					Assertions.assertThat( statementInspector.getSqlQueries().get( 0 ) ).containsOnlyOnce( "?" );
+					Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "?" );
+				}
+				else {
+					Assertions.assertThat( statementInspector.getSqlQueries().get( 0 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+					Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+				}
 			}
 		);
 	}

View File

@@ -31,7 +31,7 @@ import jakarta.persistence.ManyToOne;
 import jakarta.persistence.OneToMany;
 import jakarta.persistence.Table;
-import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -89,26 +89,25 @@ public class BatchAndEmbeddedIdIdTest {
 	@Test
 	public void testSelectChild(SessionFactoryScope scope) {
 		SQLStatementInspector statementInspector = (SQLStatementInspector) scope.getStatementInspector();
-		scope.inTransaction(
-				session -> {
-					statementInspector.clear();
-					List<Child> children = session.createQuery( "select c from Child c", Child.class ).getResultList();
-					statementInspector.assertExecutedCount( 3 );
-					assertThat( statementInspector.getSqlQueries()
-							.get( 1 )
-							.toLowerCase( Locale.ROOT )
-							.contains( "in(?,?,?,?,?)" ) ).isTrue();
-					assertThat( statementInspector.getSqlQueries()
-							.get( 2 )
-							.toLowerCase( Locale.ROOT )
-							.contains( "in(?,?,?,?,?)" ) ).isTrue();
+		scope.inTransaction( (session) -> {
+			statementInspector.clear();
+			List<Child> children = session.createQuery( "select c from Child c", Child.class ).getResultList();
+			statementInspector.assertExecutedCount( 3 );
+			if ( scope.getSessionFactory().getJdbcServices().getDialect().supportsStandardArrays() ) {
+				assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "?" );
+				assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "?" );
+			}
+			else {
+				assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+				assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "in(?,?,?,?,?)" );
+			}
 			statementInspector.clear();
 			for ( Child c : children ) {
 				c.getParent().getName();
 			}
 			statementInspector.assertExecutedCount( 0 );
-				}
-		);
+		} );
 	}
 	@Test

View File

@ -0,0 +1,38 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.orm.test.batch;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.loader.ast.spi.MultiKeyLoadSizingStrategy;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.ServiceRegistryScope;
import org.junit.jupiter.api.Test;
import static java.util.Locale.ROOT;
/**
* @author Steve Ebersole
*/
@ServiceRegistry
public class MultiKeyLoadSizingStrategyTests {
@Test
public void testSizes(ServiceRegistryScope scope) {
scope.withService( JdbcServices.class, (jdbcServices) -> {
final MultiKeyLoadSizingStrategy sizingStrategy = jdbcServices.getDialect().getBatchLoadSizingStrategy();
check( 1, 1000, true, sizingStrategy );
check( 1, 1000, false, sizingStrategy );
} );
}
private void check(int columns, int size, boolean pad, MultiKeyLoadSizingStrategy sizingStrategy) {
final int value = sizingStrategy.determineOptimalBatchLoadSize( columns, size, pad );
System.out.printf( ROOT, "(%s, %s, %s) - %s%n", columns, size, pad, value );
}
}
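
A minimal sketch of the sizing expectation these tests encode for a single-column identifier. It uses only APIs exercised in this commit (`MultiKeyLoadHelper.supportsSqlArrayType`, `Dialect#getBatchLoadSizingStrategy`, `MultiKeyLoadSizingStrategy#determineOptimalBatchLoadSize`); the class and method names below are illustrative assumptions, not part of the change.

[source,java]
----
import org.hibernate.dialect.Dialect;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;

// Illustrative sketch (not part of this commit).
public class MultiKeyLoadSizingSketch {
	/**
	 * Expected number of JDBC placeholders in a multi-key restriction for a
	 * single-column id.
	 */
	public static int expectedPlaceholderCount(Dialect dialect, int keysToLoad) {
		if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
			// the keys are bound as one SQL ARRAY parameter, so the SQL contains a single '?'
			return 1;
		}
		// otherwise an IN list is used, sized by the dialect's multi-key load sizing strategy;
		// the `false` mirrors the `pad` flag the test above passes
		return dialect.getBatchLoadSizingStrategy()
				.determineOptimalBatchLoadSize( 1, keysToLoad, false );
	}
}
----

With five keys and one id column this yields the same value the BatchAndClassId tests above compare against the `?` count.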

View File

@ -8,14 +8,12 @@ package org.hibernate.orm.test.batchfetch;
import jakarta.persistence.Entity; import jakarta.persistence.Entity;
import jakarta.persistence.FetchType; import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id; import jakarta.persistence.Id;
import jakarta.persistence.OneToOne; import jakarta.persistence.OneToOne;
@Entity @Entity
public class A { public class A {
@Id @Id
@GeneratedValue
private Integer id; private Integer id;
private String otherProperty; private String otherProperty;
@ -23,6 +21,13 @@ public class A {
@OneToOne(fetch = FetchType.LAZY) @OneToOne(fetch = FetchType.LAZY)
private B b; private B b;
public A() {
}
public A(Integer id) {
this.id = id;
}
public Integer getId() { public Integer getId() {
return id; return id;
} }

View File

@ -25,6 +25,14 @@ public class B {
private String otherProperty; private String otherProperty;
public B() {
}
public B(Integer idPart1, Integer idPart2) {
this.idPart1 = idPart1;
this.idPart2 = idPart2;
}
public Integer getIdPart1() { public Integer getIdPart1() {
return idPart1; return idPart1;
} }

View File

@ -15,6 +15,14 @@ public class BId
private Integer idPart1; private Integer idPart1;
private Integer idPart2; private Integer idPart2;
public BId() {
}
public BId(Integer idPart1, Integer idPart2) {
this.idPart1 = idPart1;
this.idPart2 = idPart2;
}
public Integer getIdPart1() { public Integer getIdPart1() {
return idPart1; return idPart1;
} }
@ -35,4 +43,4 @@ public class BId
public String toString() { public String toString() {
return "BId (" + idPart1 + ", " + idPart2 + ")"; return "BId (" + idPart1 + ", " + idPart2 + ")";
} }
} }

View File

@ -0,0 +1,166 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
*/
package org.hibernate.orm.test.batchfetch;
import org.hibernate.Hibernate;
import org.hibernate.annotations.BatchSize;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;
import org.hibernate.testing.jdbc.SQLStatementInspector;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Basic;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Steve Ebersole
*/
public class BatchFetchStrategyLoadingTests {
@Test
@DomainModel( annotatedClasses = {
BatchFetchStrategyLoadingTests.Thing1.class,
BatchFetchStrategyLoadingTests.Thing2.class
})
@SessionFactory( useCollectingStatementInspector = true )
public void testIt(SessionFactoryScope scope) {
final SQLStatementInspector statementInspector = scope.getCollectingStatementInspector();
statementInspector.clear();
scope.inTransaction( (session) -> {
final Thing2 thing21 = session.getReference( Thing2.class, 1 );
final Thing2 thing22 = session.getReference( Thing2.class, 2 );
final Thing2 thing23 = session.getReference( Thing2.class, 3 );
assertThat( statementInspector.getSqlQueries() ).isEmpty();
assertThat( Hibernate.isInitialized( thing21 ) ).isFalse();
assertThat( Hibernate.isInitialized( thing22 ) ).isFalse();
assertThat( Hibernate.isInitialized( thing23 ) ).isFalse();
final String name = thing21.getName();
assertThat( name ).isEqualTo( "thing-2.1" );
assertThat( statementInspector.getSqlQueries() ).hasSize( 1 );
if ( MultiKeyLoadHelper.supportsSqlArrayType( scope.getSessionFactory().getJdbcServices().getDialect() ) ) {
assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 0 ), "?" ) ).isEqualTo( 1 );
}
else {
assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 0 ), "?" ) ).isEqualTo( 64 );
}
assertThat( Hibernate.isInitialized( thing21 ) ).isTrue();
assertThat( Hibernate.isInitialized( thing22 ) ).isTrue();
assertThat( Hibernate.isInitialized( thing23 ) ).isTrue();
} );
}
@BeforeEach
public void createTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Thing2 thing2 = new Thing2( 1, "thing-2.1" );
final Thing1 thing1 = new Thing1( 1, "thing-1", thing2 );
session.persist( thing2 );
session.persist( thing1 );
session.persist( new Thing2( 2, "thing-2.2" ) );
session.persist( new Thing2( 3, "thing-2.3" ) );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createMutationQuery( "delete Thing1" ).executeUpdate();
session.createMutationQuery( "delete Thing2" ).executeUpdate();
} );
}
@Entity( name = "Thing1" )
@Table( name = "Thing1" )
public static class Thing1 {
@Id
private Integer id;
@Basic
private String name;
@ManyToOne
@JoinColumn( name = "thing2_fk" )
private Thing2 thing2;
protected Thing1() {
// for use by Hibernate
}
public Thing1(Integer id, String name, Thing2 thing2) {
this.id = id;
this.name = name;
this.thing2 = thing2;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Thing2 getThing2() {
return thing2;
}
public void setThing2(Thing2 thing2) {
this.thing2 = thing2;
}
}
@Entity( name = "Thing2" )
@Table( name = "Thing2" )
@BatchSize( size = 64 )
public static class Thing2 {
@Id
private Integer id;
@Basic
private String name;
protected Thing2() {
// for use by Hibernate
}
public Thing2(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}

View File

@ -9,15 +9,19 @@ package org.hibernate.orm.test.batchfetch;
import java.util.List; import java.util.List;
import org.hibernate.Hibernate; import org.hibernate.Hibernate;
import org.hibernate.annotations.BatchSize;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.testing.orm.junit.DomainModel; import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.ServiceRegistry; import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory; import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope; import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting; import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
@ -34,6 +38,8 @@ public class DynamicBatchFetchTest {
@Test @Test
public void testDynamicBatchFetch(SessionFactoryScope scope) { public void testDynamicBatchFetch(SessionFactoryScope scope) {
currentId = 1;
Integer aId1 = createAAndB( "foo_1", scope ); Integer aId1 = createAAndB( "foo_1", scope );
Integer aId2 = createAAndB( "foo_2", scope ); Integer aId2 = createAAndB( "foo_2", scope );
@ -41,6 +47,7 @@ public class DynamicBatchFetchTest {
session -> { session -> {
List resultList = session.createQuery( "from A where id in (" + aId1 + "," + aId2 + ") order by id" ) List resultList = session.createQuery( "from A where id in (" + aId1 + "," + aId2 + ") order by id" )
.list(); .list();
assertThat( resultList ).isNotEmpty();
A a1 = (A) resultList.get( 0 ); A a1 = (A) resultList.get( 0 );
A a2 = (A) resultList.get( 1 ); A a2 = (A) resultList.get( 1 );
assertEquals( aId1, a1.getId() ); assertEquals( aId1, a1.getId() );
@@ -60,17 +67,14 @@ public class DynamicBatchFetchTest {
 	private int createAAndB(String otherProperty, SessionFactoryScope scope) {
 		scope.inTransaction(
 				session -> {
-					B b = new B();
-					b.setIdPart1( currentId );
-					b.setIdPart2( currentId );
+					B b = new B( currentId, currentId );
 					b.setOtherProperty( otherProperty );
-					session.save( b );
-					A a = new A();
-					a.setId( currentId );
+					session.persist( b );
+					A a = new A( currentId );
 					a.setB( b );
-					session.save( a );
+					session.persist( a );
 				}
 		);
@@ -78,4 +82,41 @@ public class DynamicBatchFetchTest {
 		return currentId - 1;
 	}
/**
 * Tests the handling of multi-loads with regard to the BatchLoadSizingStrategy and its correlation
 * to batch fetching. Shows that both previously suffered from always using the mapped
 * {@link BatchSize#size()}, which led to predicates with too many parameters when the
 * mapped batch size is large.
 */
@Test
public void testMultiLoad(SessionFactoryScope scope) {
final List<BId> bIdList = CollectionHelper.arrayList( 2000 );
scope.inTransaction( (session) -> {
for ( int i = 0; i < 2000; i++ ) {
bIdList.add( new BId( i, i ) );
final B b = new B( i, i );
session.persist( b );
A a = new A( i );
a.setB( b );
session.persist( a );
}
} );
scope.inTransaction( (session) -> {
final List<B> list = session.byMultipleIds( B.class ).multiLoad( bIdList );
assertThat( list ).isNotEmpty();
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createQuery( "delete A" ).executeUpdate();
session.createQuery( "delete B" ).executeUpdate();
} );
}
} }

View File

@ -10,6 +10,8 @@ import java.util.HashSet;
import java.util.Set; import java.util.Set;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;
import org.hibernate.testing.jdbc.SQLStatementInspector; import org.hibernate.testing.jdbc.SQLStatementInspector;
import org.hibernate.testing.orm.junit.DomainModel; import org.hibernate.testing.orm.junit.DomainModel;
@ -30,6 +32,7 @@ import jakarta.persistence.OrderBy;
import static jakarta.persistence.CascadeType.ALL; import static jakarta.persistence.CascadeType.ALL;
import static jakarta.persistence.FetchType.LAZY; import static jakarta.persistence.FetchType.LAZY;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
/** /**
@@ -111,18 +114,24 @@ public class NestedLazyManyToOneTest {
 					else {
 						Set<Entity3> children3 = child2.getChildren();
 						if ( child2.getId().equals( "0_0" ) ) {
-							assertEquals( 5, children3.size() );
+							assertEquals( 5, children3.size(), "Size of `Child2(0_0).children3` did not match expectation" );
 						}
 						else {
-							assertEquals( 0, children3.size() );
+							assertEquals( 0, children3.size(), "Size of `Child2(" + child2.getId() + ").children3` did not match expectation" );
 						}
 					}
 				}
 				assertEquals( 8, entity1.getChildren().size() );
-				statementInspector.assertExecutedCount( 3 ); // 1 for Entity1, 1 for Entity2, 1 for Entity3
+				// 1 for Entity1, 1 for Entity2, 1 for Entity3
+				statementInspector.assertExecutedCount( 3 );
 				statementInspector.assertNumberOfOccurrenceInQueryNoSpace( 1, QUESTION_MARK, 1 );
-				statementInspector.assertNumberOfOccurrenceInQueryNoSpace( 2, QUESTION_MARK, 5 );
+				if ( MultiKeyLoadHelper.supportsSqlArrayType( scope.getSessionFactory().getJdbcServices().getDialect() ) ) {
+					assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 2 ), '?' ) ).isEqualTo( 1 );
+				}
+				else {
+					assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 2 ), '?' ) ).isEqualTo( 5 );
+				}
 			} );
 		}
@@ -143,11 +152,18 @@
 			}
 			}
-			assertEquals( 8, entity1.getChildren().size() );
-			statementInspector.assertExecutedCount( 4 ); // 1 for Entity1, 1 for Entity2, 2 for Entity3
-			statementInspector.assertNumberOfOccurrenceInQueryNoSpace( 1, QUESTION_MARK, 1 );
-			statementInspector.assertNumberOfOccurrenceInQueryNoSpace( 2, QUESTION_MARK, 5 );
-			statementInspector.assertNumberOfOccurrenceInQueryNoSpace( 3, QUESTION_MARK, 3 );
+			assertThat( entity1.getChildren() ).hasSize( 8 );
+			// 1 for Entity1, 1 for Entity2, 2 for Entity3
+			assertThat( statementInspector.getSqlQueries() ).hasSize( 4 );
+			assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 1 ), '?' ) ).isEqualTo( 1 );
+			if ( MultiKeyLoadHelper.supportsSqlArrayType( scope.getSessionFactory().getJdbcServices().getDialect() ) ) {
+				assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 2 ), '?' ) ).isEqualTo( 1 );
+				assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 3 ), '?' ) ).isEqualTo( 1 );
+			}
+			else {
+				assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 2 ), '?' ) ).isEqualTo( 5 );
+				assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 3 ), '?' ) ).isEqualTo( 5 );
+			}
 		} );
 	}

View File

@ -17,6 +17,7 @@ import org.hibernate.HibernateException;
import org.hibernate.annotations.Type; import org.hibernate.annotations.Type;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.usertype.EnhancedUserType;
import org.hibernate.usertype.UserType; import org.hibernate.usertype.UserType;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
@@ -122,7 +123,7 @@ public class UserTypeComparableIdTest {
 		}
 	}
-	public static class CustomIdType implements UserType<CustomId>, Comparator<CustomId> {
+	public static class CustomIdType implements EnhancedUserType<CustomId>, Comparator<CustomId> {
 		@Override
 		public int getSqlType() {
@ -195,5 +196,34 @@ public class UserTypeComparableIdTest {
public CustomId replace(CustomId original, CustomId target, Object owner) throws HibernateException { public CustomId replace(CustomId original, CustomId target, Object owner) throws HibernateException {
return original; return original;
} }
@Override
public String toSqlLiteral(CustomId value) {
return toString( value );
}
@Override
public String toString(CustomId customId) throws HibernateException {
if ( customId == null ) {
return null;
}
final Long longValue = customId.getValue();
if ( longValue == null ) {
return null;
}
return longValue.toString();
}
@Override
public CustomId fromStringValue(CharSequence sequence) throws HibernateException {
if ( sequence == null ) {
return null;
}
final long longValue = Long.parseLong( sequence.toString() );
return new CustomId( longValue );
}
} }
} }

View File

@ -20,6 +20,7 @@ import jakarta.persistence.Table;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.annotations.Type; import org.hibernate.annotations.Type;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.usertype.EnhancedUserType;
import org.hibernate.usertype.UserType; import org.hibernate.usertype.UserType;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
@@ -109,7 +110,7 @@ public class UserTypeNonComparableIdTest {
 		}
 	}
-	public static class CustomIdType implements UserType<CustomId> {
+	public static class CustomIdType implements EnhancedUserType<CustomId> {
 		@Override
 		public int getSqlType() {
@ -177,5 +178,34 @@ public class UserTypeNonComparableIdTest {
public CustomId replace(CustomId original, CustomId target, Object owner) throws HibernateException { public CustomId replace(CustomId original, CustomId target, Object owner) throws HibernateException {
return original; return original;
} }
@Override
public String toSqlLiteral(CustomId customId) {
return toString( customId );
}
@Override
public String toString(CustomId customId) throws HibernateException {
if ( customId == null ) {
return null;
}
final Long longValue = customId.getValue();
if ( longValue == null ) {
return null;
}
return longValue.toString();
}
@Override
public CustomId fromStringValue(CharSequence sequence) throws HibernateException {
if ( sequence == null ) {
return null;
}
final long longValue = Long.parseLong( sequence.toString() );
return new CustomId( longValue );
}
} }
} }

View File

@@ -60,7 +60,7 @@ public class LoadingSmokeTests {
 				.getRuntimeMetamodels().getMappingMetamodel()
 				.getEntityDescriptor( BasicEntity.class );
-		final SingleIdEntityLoader singleIdEntityLoader = entityDescriptor.getSingleIdEntityLoader();
+		final SingleIdEntityLoader singleIdEntityLoader = entityDescriptor.getSingleIdLoader();
 		assertThat( singleIdEntityLoader, instanceOf( SingleIdEntityLoaderStandardImpl.class ) );
 		assertThat(
 				( (SingleIdEntityLoaderStandardImpl) singleIdEntityLoader ).getNonReusablePlansGenerated().get(),

View File

@ -10,10 +10,13 @@ import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode; import org.hibernate.annotations.FetchMode;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.DatabaseVersion; import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.jdbc.SQLStatementInspector;
import org.hibernate.testing.orm.junit.DomainModel; import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialect; import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.ServiceRegistry; import org.hibernate.testing.orm.junit.ServiceRegistry;
@ -34,6 +37,7 @@ import jakarta.persistence.OneToMany;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import static jakarta.persistence.GenerationType.AUTO; import static jakarta.persistence.GenerationType.AUTO;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -45,7 +49,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 @ServiceRegistry(
 		settingProviders = @SettingProvider( provider = MultiLoadSubSelectCollectionDialectWithLimitTest.TestSettingProvider.class, settingName = AvailableSettings.DIALECT)
 )
-@SessionFactory(generateStatistics = true)
+@SessionFactory(generateStatistics = true, useCollectingStatementInspector = true)
 @RequiresDialect( H2Dialect.class )
 public class MultiLoadSubSelectCollectionDialectWithLimitTest {
@@ -104,37 +108,52 @@ public class MultiLoadSubSelectCollectionDialectWithLimitTest {
 	@Test
 	@TestForIssue(jiraKey = "HHH-12740")
 	public void testSubselect(SessionFactoryScope scope) {
+		final SQLStatementInspector statementInspector = scope.getCollectingStatementInspector();
+		statementInspector.clear();
 		scope.inTransaction(
 				session -> {
 					List<Parent> list = session.byMultipleIds( Parent.class ).multiLoad( ids( 56 ) );
 					assertEquals( 56, list.size() );
 					// None of the collections should be loaded yet
+					assertThat( statementInspector.getSqlQueries() ).hasSize( 1 );
 					for ( Parent p : list ) {
 						assertFalse( Hibernate.isInitialized( p.children ) );
 					}
-					// When the first collection is loaded, the full batch of 50 collections
-					// should be loaded.
+					statementInspector.clear();
+					// When the first collection is loaded multiple will be initialized
 					Hibernate.initialize( list.get( 0 ).children );
-					for ( int i = 0; i < 50; i++ ) {
-						assertTrue( Hibernate.isInitialized( list.get( i ).children ) );
-						assertEquals( i + 1, list.get( i ).children.size() );
+					// exactly how depends on whether the Dialect supports use of SQL ARRAY
+					final Dialect dialect = scope.getSessionFactory().getFastSessionServices().jdbcServices.getDialect();
+					if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
+						assertThat( Hibernate.isInitialized( list.get( 0 ).children ) ).isTrue();
+						assertThat( Hibernate.isInitialized( list.get( 50 ).children ) ).isTrue();
+						assertThat( Hibernate.isInitialized( list.get( 52 ).children ) ).isTrue();
+						assertThat( statementInspector.getSqlQueries() ).hasSize( 1 );
 					}
+					else {
+						for ( int i = 0; i < 50; i++ ) {
+							assertTrue( Hibernate.isInitialized( list.get( i ).children ) );
+							assertEquals( i + 1, list.get( i ).children.size() );
+						}
 						// The collections for the 51st through 56th entities should still be uninitialized
 						for ( int i = 50; i < 56; i++ ) {
 							assertFalse( Hibernate.isInitialized( list.get( i ).children ) );
 						}
 						// When the 51st collection gets initialized, the remaining collections should
 						// also be initialized.
 						Hibernate.initialize( list.get( 50 ).children );
 						for ( int i = 50; i < 56; i++ ) {
 							assertTrue( Hibernate.isInitialized( list.get( i ).children ) );
 							assertEquals( i + 1, list.get( i ).children.size() );
+						}
 					}
 				}
 		);

View File

@ -14,11 +14,11 @@ import org.hibernate.Hibernate;
import org.hibernate.annotations.BatchSize; import org.hibernate.annotations.BatchSize;
import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.EntityEntry; import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey; import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;
import org.hibernate.stat.Statistics; import org.hibernate.stat.Statistics;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
@ -100,7 +100,17 @@ public class MultiLoadTest {
statementInspector.getSqlQueries().get( 0 ), statementInspector.getSqlQueries().get( 0 ),
'?' '?'
); );
assertThat( paramCount, is( 5 ) );
final Dialect dialect = session.getSessionFactory()
.getFastSessionServices()
.jdbcServices
.getDialect();
if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
assertThat( paramCount, is( 1 ) );
}
else {
assertThat( paramCount, is( 5 ) );
}
} }
); );
} }
@ -314,7 +324,17 @@ public class MultiLoadTest {
statementInspector.getSqlQueries().get( 0 ), statementInspector.getSqlQueries().get( 0 ),
'?' '?'
); );
assertThat( paramCount, is( 2 ) );
final Dialect dialect = session.getSessionFactory()
.getFastSessionServices()
.jdbcServices
.getDialect();
if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
assertThat( paramCount, is( 1 ) );
}
else {
assertThat( paramCount, is( 2 ) );
}
} }
); );
} }
@ -369,7 +389,17 @@ public class MultiLoadTest {
statementInspector.getSqlQueries().get( 0 ), statementInspector.getSqlQueries().get( 0 ),
'?' '?'
); );
assertThat( paramCount, is( 2 ) );
final Dialect dialect = session.getSessionFactory()
.getFastSessionServices()
.jdbcServices
.getDialect();
if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
assertThat( paramCount, is( 1 ) );
}
else {
assertThat( paramCount, is( 2 ) );
}
} }
); );
} }
@ -400,7 +430,17 @@ public class MultiLoadTest {
statementInspector.getSqlQueries().get( 0 ), statementInspector.getSqlQueries().get( 0 ),
'?' '?'
); );
assertThat( paramCount, is( 2 ) );
final Dialect dialect = session.getSessionFactory()
.getFastSessionServices()
.jdbcServices
.getDialect();
if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
assertThat( paramCount, is( 1 ) );
}
else {
assertThat( paramCount, is( 2 ) );
}
} }
); );
} }
@ -437,7 +477,17 @@ public class MultiLoadTest {
statementInspector.getSqlQueries().get( 0 ), statementInspector.getSqlQueries().get( 0 ),
'?' '?'
); );
assertThat( paramCount, is( 2 ) );
final Dialect dialect = session.getSessionFactory()
.getFastSessionServices()
.jdbcServices
.getDialect();
if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
assertThat( paramCount, is( 1 ) );
}
else {
assertThat( paramCount, is( 2 ) );
}
} ); } );
} }
@ -467,7 +517,17 @@ public class MultiLoadTest {
statementInspector.getSqlQueries().get( 0 ), statementInspector.getSqlQueries().get( 0 ),
'?' '?'
); );
assertThat( paramCount, is( 2 ) );
final Dialect dialect = session.getSessionFactory()
.getFastSessionServices()
.jdbcServices
.getDialect();
if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
assertThat( paramCount, is( 1 ) );
}
else {
assertThat( paramCount, is( 2 ) );
}
} ); } );
} }
@ -503,7 +563,17 @@ public class MultiLoadTest {
statementInspector.getSqlQueries().get( 0 ), statementInspector.getSqlQueries().get( 0 ),
'?' '?'
); );
assertThat( paramCount, is( 2 ) );
final Dialect dialect = session.getSessionFactory()
.getFastSessionServices()
.jdbcServices
.getDialect();
if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
assertThat( paramCount, is( 1 ) );
}
else {
assertThat( paramCount, is( 2 ) );
}
} ); } );
} }
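
The same dialect check repeats in each MultiLoadTest hunk above. A hedged sketch of how it could be expressed once follows; the helper name and AssertJ usage are illustrative, not part of the commit, while `StringHelper.count` and `MultiKeyLoadHelper.supportsSqlArrayType` are the utilities the tests themselves use.

[source,java]
----
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;

import static org.assertj.core.api.Assertions.assertThat;

// Illustrative sketch (not part of this commit).
public final class MultiKeyLoadAssertions {
	private MultiKeyLoadAssertions() {
	}

	/**
	 * Asserts the placeholder count of a multi-key load statement: one '?' when the
	 * dialect binds the ids as a SQL ARRAY, otherwise the expected IN-list size.
	 */
	public static void assertPlaceholderCount(Dialect dialect, String sql, int expectedInListPlaceholders) {
		final int paramCount = StringHelper.count( sql, '?' );
		if ( MultiKeyLoadHelper.supportsSqlArrayType( dialect ) ) {
			assertThat( paramCount ).isEqualTo( 1 );
		}
		else {
			assertThat( paramCount ).isEqualTo( expectedInListPlaceholders );
		}
	}
}
----

Factoring the check this way would reduce each hunk above to a single call; the commit itself keeps the inline form in each test.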

View File

@ -11,6 +11,7 @@ import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.ast.spi.AfterLoadAction; import org.hibernate.loader.ast.spi.AfterLoadAction;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor; import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
import org.hibernate.persister.entity.Loadable; import org.hibernate.persister.entity.Loadable;
import org.hibernate.query.sqm.tree.select.SqmSelectStatement; import org.hibernate.query.sqm.tree.select.SqmSelectStatement;
@@ -51,7 +52,7 @@ public abstract class BaseSqmUnitTest
 	}
 	@Override
-	public void invokeAfterLoadActions(SharedSessionContractImplementor session, Object entity, Loadable persister) {
+	public void invokeAfterLoadActions(Object entity, EntityMappingType entityMappingType, SharedSessionContractImplementor session) {
 	}
 	protected SqmSelectStatement<?> interpretSelect(String hql) {